/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "stringpool.h"
#include "tree-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int) END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
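/* Illustrative example (not part of the original source): for an EXP whose
   address is known to be congruent to 4 modulo 16 bytes, this function would
   return true with *ALIGNP = 128 and *BITPOSP = 32, both counted in bits.  */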
252 get_object_alignment_2 (tree exp
, unsigned int *alignp
,
253 unsigned HOST_WIDE_INT
*bitposp
, bool addr_p
)
255 HOST_WIDE_INT bitsize
, bitpos
;
258 int unsignedp
, reversep
, volatilep
;
259 unsigned int align
= BITS_PER_UNIT
;
260 bool known_alignment
= false;
262 /* Get the innermost object and the constant (bitpos) and possibly
263 variable (offset) offset of the access. */
264 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode
,
265 &unsignedp
, &reversep
, &volatilep
);
267 /* Extract alignment information from the innermost object and
268 possibly adjust bitpos and offset. */
269 if (TREE_CODE (exp
) == FUNCTION_DECL
)
271 /* Function addresses can encode extra information besides their
272 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
273 allows the low bit to be used as a virtual bit, we know
274 that the address itself must be at least 2-byte aligned. */
275 if (TARGET_PTRMEMFUNC_VBIT_LOCATION
== ptrmemfunc_vbit_in_pfn
)
276 align
= 2 * BITS_PER_UNIT
;
278 else if (TREE_CODE (exp
) == LABEL_DECL
)
280 else if (TREE_CODE (exp
) == CONST_DECL
)
282 /* The alignment of a CONST_DECL is determined by its initializer. */
283 exp
= DECL_INITIAL (exp
);
284 align
= TYPE_ALIGN (TREE_TYPE (exp
));
285 if (CONSTANT_CLASS_P (exp
))
286 align
= targetm
.constant_alignment (exp
, align
);
288 known_alignment
= true;
290 else if (DECL_P (exp
))
292 align
= DECL_ALIGN (exp
);
293 known_alignment
= true;
295 else if (TREE_CODE (exp
) == INDIRECT_REF
296 || TREE_CODE (exp
) == MEM_REF
297 || TREE_CODE (exp
) == TARGET_MEM_REF
)
299 tree addr
= TREE_OPERAND (exp
, 0);
301 unsigned HOST_WIDE_INT ptr_bitpos
;
302 unsigned HOST_WIDE_INT ptr_bitmask
= ~0;
      /* If the address is explicitly aligned, handle that.  */
305 if (TREE_CODE (addr
) == BIT_AND_EXPR
306 && TREE_CODE (TREE_OPERAND (addr
, 1)) == INTEGER_CST
)
308 ptr_bitmask
= TREE_INT_CST_LOW (TREE_OPERAND (addr
, 1));
309 ptr_bitmask
*= BITS_PER_UNIT
;
310 align
= least_bit_hwi (ptr_bitmask
);
311 addr
= TREE_OPERAND (addr
, 0);
315 = get_pointer_alignment_1 (addr
, &ptr_align
, &ptr_bitpos
);
316 align
= MAX (ptr_align
, align
);
318 /* Re-apply explicit alignment to the bitpos. */
319 ptr_bitpos
&= ptr_bitmask
;
321 /* The alignment of the pointer operand in a TARGET_MEM_REF
322 has to take the variable offset parts into account. */
323 if (TREE_CODE (exp
) == TARGET_MEM_REF
)
327 unsigned HOST_WIDE_INT step
= 1;
329 step
= TREE_INT_CST_LOW (TMR_STEP (exp
));
330 align
= MIN (align
, least_bit_hwi (step
) * BITS_PER_UNIT
);
332 if (TMR_INDEX2 (exp
))
333 align
= BITS_PER_UNIT
;
334 known_alignment
= false;
337 /* When EXP is an actual memory reference then we can use
338 TYPE_ALIGN of a pointer indirection to derive alignment.
339 Do so only if get_pointer_alignment_1 did not reveal absolute
340 alignment knowledge and if using that alignment would
341 improve the situation. */
343 if (!addr_p
&& !known_alignment
344 && (talign
= min_align_of_type (TREE_TYPE (exp
)) * BITS_PER_UNIT
)
349 /* Else adjust bitpos accordingly. */
350 bitpos
+= ptr_bitpos
;
351 if (TREE_CODE (exp
) == MEM_REF
352 || TREE_CODE (exp
) == TARGET_MEM_REF
)
353 bitpos
+= mem_ref_offset (exp
).to_short_addr () * BITS_PER_UNIT
;
356 else if (TREE_CODE (exp
) == STRING_CST
)
358 /* STRING_CST are the only constant objects we allow to be not
359 wrapped inside a CONST_DECL. */
360 align
= TYPE_ALIGN (TREE_TYPE (exp
));
361 if (CONSTANT_CLASS_P (exp
))
362 align
= targetm
.constant_alignment (exp
, align
);
364 known_alignment
= true;
367 /* If there is a non-constant offset part extract the maximum
368 alignment that can prevail. */
371 unsigned int trailing_zeros
= tree_ctz (offset
);
372 if (trailing_zeros
< HOST_BITS_PER_INT
)
374 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
376 align
= MIN (align
, inner
);
381 *bitposp
= bitpos
& (*alignp
- 1);
382 return known_alignment
;
385 /* For a memory reference expression EXP compute values M and N such that M
386 divides (&EXP - N) and such that N < M. If these numbers can be determined,
387 store M in alignp and N in *BITPOSP and return true. Otherwise return false
388 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
391 get_object_alignment_1 (tree exp
, unsigned int *alignp
,
392 unsigned HOST_WIDE_INT
*bitposp
)
394 return get_object_alignment_2 (exp
, alignp
, bitposp
, false);
397 /* Return the alignment in bits of EXP, an object. */
400 get_object_alignment (tree exp
)
402 unsigned HOST_WIDE_INT bitpos
= 0;
405 get_object_alignment_1 (exp
, &align
, &bitpos
);
407 /* align and bitpos now specify known low bits of the pointer.
408 ptr & (align - 1) == bitpos. */
411 align
= least_bit_hwi (bitpos
);
415 /* For a pointer valued expression EXP compute values M and N such that M
416 divides (EXP - N) and such that N < M. If these numbers can be determined,
417 store M in alignp and N in *BITPOSP and return true. Return false if
418 the results are just a conservative approximation.
420 If EXP is not a pointer, false is returned too. */
423 get_pointer_alignment_1 (tree exp
, unsigned int *alignp
,
424 unsigned HOST_WIDE_INT
*bitposp
)
428 if (TREE_CODE (exp
) == ADDR_EXPR
)
429 return get_object_alignment_2 (TREE_OPERAND (exp
, 0),
430 alignp
, bitposp
, true);
431 else if (TREE_CODE (exp
) == POINTER_PLUS_EXPR
)
434 unsigned HOST_WIDE_INT bitpos
;
435 bool res
= get_pointer_alignment_1 (TREE_OPERAND (exp
, 0),
437 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == INTEGER_CST
)
438 bitpos
+= TREE_INT_CST_LOW (TREE_OPERAND (exp
, 1)) * BITS_PER_UNIT
;
441 unsigned int trailing_zeros
= tree_ctz (TREE_OPERAND (exp
, 1));
442 if (trailing_zeros
< HOST_BITS_PER_INT
)
444 unsigned int inner
= (1U << trailing_zeros
) * BITS_PER_UNIT
;
446 align
= MIN (align
, inner
);
450 *bitposp
= bitpos
& (align
- 1);
453 else if (TREE_CODE (exp
) == SSA_NAME
454 && POINTER_TYPE_P (TREE_TYPE (exp
)))
456 unsigned int ptr_align
, ptr_misalign
;
457 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (exp
);
459 if (pi
&& get_ptr_info_alignment (pi
, &ptr_align
, &ptr_misalign
))
461 *bitposp
= ptr_misalign
* BITS_PER_UNIT
;
462 *alignp
= ptr_align
* BITS_PER_UNIT
;
463 /* Make sure to return a sensible alignment when the multiplication
464 by BITS_PER_UNIT overflowed. */
466 *alignp
= 1u << (HOST_BITS_PER_INT
- 1);
467 /* We cannot really tell whether this result is an approximation. */
473 *alignp
= BITS_PER_UNIT
;
477 else if (TREE_CODE (exp
) == INTEGER_CST
)
479 *alignp
= BIGGEST_ALIGNMENT
;
480 *bitposp
= ((TREE_INT_CST_LOW (exp
) * BITS_PER_UNIT
)
481 & (BIGGEST_ALIGNMENT
- 1));
486 *alignp
= BITS_PER_UNIT
;
490 /* Return the alignment in bits of EXP, a pointer valued expression.
491 The alignment returned is, by default, the alignment of the thing that
492 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
494 Otherwise, look at the expression to see if we can do better, i.e., if the
495 expression is actually pointing at an object whose alignment is tighter. */
498 get_pointer_alignment (tree exp
)
500 unsigned HOST_WIDE_INT bitpos
= 0;
503 get_pointer_alignment_1 (exp
, &align
, &bitpos
);
505 /* align and bitpos now specify known low bits of the pointer.
506 ptr & (align - 1) == bitpos. */
509 align
= least_bit_hwi (bitpos
);
/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */
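/* Illustrative examples (assumptions, not from the original source):
   string_length ("ab\0cd", 1, 5) returns 2, while for a 4-byte wide string
   such as L"x\0y" with ELTSIZE == 4 and MAXELTS == 2 it returns 1.  */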
519 string_length (const void *ptr
, unsigned eltsize
, unsigned maxelts
)
521 gcc_checking_assert (eltsize
== 1 || eltsize
== 2 || eltsize
== 4);
527 /* Optimize the common case of plain char. */
528 for (n
= 0; n
< maxelts
; n
++)
530 const char *elt
= (const char*) ptr
+ n
;
537 for (n
= 0; n
< maxelts
; n
++)
539 const char *elt
= (const char*) ptr
+ n
* eltsize
;
540 if (!memcmp (elt
, "\0\0\0\0", eltsize
))
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
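/* Illustrative examples (assumptions, not from the original source): for the
   STRING_CST "hello\0world" with a constant byte offset of 6, c_strlen
   returns ssize_int (5); with an unknown (non-constant) offset it returns
   NULL_TREE, because the embedded nul makes the result depend on where the
   search starts.  */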
570 c_strlen (tree src
, int only_value
)
573 if (TREE_CODE (src
) == COND_EXPR
574 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
578 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
579 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
580 if (tree_int_cst_equal (len1
, len2
))
584 if (TREE_CODE (src
) == COMPOUND_EXPR
585 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
586 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
588 location_t loc
= EXPR_LOC_OR_LOC (src
, input_location
);
590 /* Offset from the beginning of the string in bytes. */
592 src
= string_constant (src
, &byteoff
);
596 /* Determine the size of the string element. */
598 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src
))));
600 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
602 unsigned maxelts
= TREE_STRING_LENGTH (src
) / eltsize
- 1;
604 /* PTR can point to the byte representation of any string type, including
605 char* and wchar_t*. */
606 const char *ptr
= TREE_STRING_POINTER (src
);
608 if (byteoff
&& TREE_CODE (byteoff
) != INTEGER_CST
)
610 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
611 compute the offset to the following null if we don't know where to
612 start searching for it. */
613 if (string_length (ptr
, eltsize
, maxelts
) < maxelts
)
615 /* Return when an embedded null character is found. */
619 /* We don't know the starting offset, but we do know that the string
620 has no internal zero bytes. We can assume that the offset falls
621 within the bounds of the string; otherwise, the programmer deserves
622 what he gets. Subtract the offset from the length of the string,
623 and return that. This would perhaps not be valid if we were dealing
624 with named arrays in addition to literal string constants. */
626 return size_diffop_loc (loc
, size_int (maxelts
* eltsize
), byteoff
);
629 /* Offset from the beginning of the string in elements. */
630 HOST_WIDE_INT eltoff
;
632 /* We have a known offset into the string. Start searching there for
633 a null character if we can represent it as a single HOST_WIDE_INT. */
636 else if (! tree_fits_shwi_p (byteoff
))
639 eltoff
= tree_to_shwi (byteoff
) / eltsize
;
641 /* If the offset is known to be out of bounds, warn, and call strlen at
643 if (eltoff
< 0 || eltoff
> maxelts
)
645 /* Suppress multiple warnings for propagated constant strings. */
647 && !TREE_NO_WARNING (src
))
649 warning_at (loc
, 0, "offset %qwi outside bounds of constant string",
651 TREE_NO_WARNING (src
) = 1;
656 /* Use strlen to search for the first zero byte. Since any strings
657 constructed with build_string will have nulls appended, we win even
658 if we get handed something like (char[4])"abcd".
660 Since ELTOFF is our starting index into the string, no further
661 calculation is needed. */
662 unsigned len
= string_length (ptr
+ eltoff
* eltsize
, eltsize
,
665 return ssize_int (len
);
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
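/* Illustrative example (assumption, not from the original source): on a
   little-endian target, c_readstr ("abcd", SImode) would yield the constant
   0x64636261, i.e. the first string byte lands in the least significant byte
   of the mode so that storing the constant reproduces the string bytes.  */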
672 c_readstr (const char *str
, scalar_int_mode mode
)
676 HOST_WIDE_INT tmp
[MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
];
678 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
679 unsigned int len
= (GET_MODE_PRECISION (mode
) + HOST_BITS_PER_WIDE_INT
- 1)
680 / HOST_BITS_PER_WIDE_INT
;
682 gcc_assert (len
<= MAX_BITSIZE_MODE_ANY_INT
/ HOST_BITS_PER_WIDE_INT
);
683 for (i
= 0; i
< len
; i
++)
687 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
690 if (WORDS_BIG_ENDIAN
)
691 j
= GET_MODE_SIZE (mode
) - i
- 1;
692 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
693 && GET_MODE_SIZE (mode
) >= UNITS_PER_WORD
)
694 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
698 ch
= (unsigned char) str
[i
];
699 tmp
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
702 wide_int c
= wide_int::from_array (tmp
, len
, GET_MODE_PRECISION (mode
));
703 return immed_wide_int_const (c
, mode
);
/* Cast a target constant CST to target CHAR and, if that value fits into
   host char type, return zero and put that value into the variable pointed
   to by P.  */
711 target_char_cast (tree cst
, char *p
)
713 unsigned HOST_WIDE_INT val
, hostval
;
715 if (TREE_CODE (cst
) != INTEGER_CST
716 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
719 /* Do not care if it fits or not right here. */
720 val
= TREE_INT_CST_LOW (cst
);
722 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
723 val
&= (HOST_WIDE_INT_1U
<< CHAR_TYPE_SIZE
) - 1;
726 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
727 hostval
&= (HOST_WIDE_INT_1U
<< HOST_BITS_PER_CHAR
) - 1;
736 /* Similar to save_expr, but assumes that arbitrary code is not executed
737 in between the multiple evaluations. In particular, we assume that a
738 non-addressable local variable will not be modified. */
741 builtin_save_expr (tree exp
)
743 if (TREE_CODE (exp
) == SSA_NAME
744 || (TREE_ADDRESSABLE (exp
) == 0
745 && (TREE_CODE (exp
) == PARM_DECL
746 || (VAR_P (exp
) && !TREE_STATIC (exp
)))))
749 return save_expr (exp
);
752 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
753 times to get the address of either a higher stack frame, or a return
754 address located within it (depending on FNDECL_CODE). */
757 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
760 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
763 /* For a zero count with __builtin_return_address, we don't care what
764 frame address we return, because target-specific definitions will
765 override us. Therefore frame pointer elimination is OK, and using
766 the soft frame pointer is OK.
768 For a nonzero count, or a zero count with __builtin_frame_address,
769 we require a stable offset from the current frame pointer to the
770 previous one, so we must use the hard frame pointer, and
771 we must disable frame pointer elimination. */
772 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
773 tem
= frame_pointer_rtx
;
776 tem
= hard_frame_pointer_rtx
;
778 /* Tell reload not to eliminate the frame pointer. */
779 crtl
->accesses_prior_frames
= 1;
784 SETUP_FRAME_ADDRESSES ();
786 /* On the SPARC, the return address is not in the frame, it is in a
787 register. There is no way to access it off of the current frame
788 pointer, but it can be accessed off the previous frame pointer by
789 reading the value from the register window save area. */
790 if (RETURN_ADDR_IN_PREVIOUS_FRAME
&& fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
793 /* Scan back COUNT frames to the specified frame. */
794 for (i
= 0; i
< count
; i
++)
796 /* Assume the dynamic chain pointer is in the word that the
797 frame address points to, unless otherwise specified. */
798 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
799 tem
= memory_address (Pmode
, tem
);
800 tem
= gen_frame_mem (Pmode
, tem
);
801 tem
= copy_to_reg (tem
);
804 /* For __builtin_frame_address, return what we've got. But, on
805 the SPARC for example, we may have to add a bias. */
806 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
807 return FRAME_ADDR_RTX (tem
);
809 /* For __builtin_return_address, get the return address from that frame. */
810 #ifdef RETURN_ADDR_RTX
811 tem
= RETURN_ADDR_RTX (count
, tem
);
813 tem
= memory_address (Pmode
,
814 plus_constant (Pmode
, tem
, GET_MODE_SIZE (Pmode
)));
815 tem
= gen_frame_mem (Pmode
, tem
);
820 /* Alias set used for setjmp buffer. */
821 static alias_set_type setjmp_alias_set
= -1;
823 /* Construct the leading half of a __builtin_setjmp call. Control will
824 return to RECEIVER_LABEL. This is also called directly by the SJLJ
825 exception handling code. */
828 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
830 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
834 if (setjmp_alias_set
== -1)
835 setjmp_alias_set
= new_alias_set ();
837 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
839 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */
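  /* Layout sketch, restated from the code below as an aid: word 0 of the
     buffer holds the frame value, word 1 holds the address of
     RECEIVER_LABEL, and the words starting at offset
     2 * GET_MODE_SIZE (Pmode) hold the SAVE_NONLOCAL stack save area.  */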
845 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
846 set_mem_alias_set (mem
, setjmp_alias_set
);
847 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
849 mem
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
850 GET_MODE_SIZE (Pmode
))),
851 set_mem_alias_set (mem
, setjmp_alias_set
);
853 emit_move_insn (validize_mem (mem
),
854 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
856 stack_save
= gen_rtx_MEM (sa_mode
,
857 plus_constant (Pmode
, buf_addr
,
858 2 * GET_MODE_SIZE (Pmode
)));
859 set_mem_alias_set (stack_save
, setjmp_alias_set
);
860 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
862 /* If there is further processing to do, do it. */
863 if (targetm
.have_builtin_setjmp_setup ())
864 emit_insn (targetm
.gen_builtin_setjmp_setup (buf_addr
));
866 /* We have a nonlocal label. */
867 cfun
->has_nonlocal_label
= 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
875 expand_builtin_setjmp_receiver (rtx receiver_label
)
879 /* Mark the FP as used when we get here, so we have to make sure it's
880 marked as used by this function. */
881 emit_use (hard_frame_pointer_rtx
);
883 /* Mark the static chain as clobbered here so life information
884 doesn't get messed up for it. */
885 chain
= targetm
.calls
.static_chain (current_function_decl
, true);
886 if (chain
&& REG_P (chain
))
887 emit_clobber (chain
);
889 /* Now put in the code to restore the frame pointer, and argument
890 pointer, if needed. */
891 if (! targetm
.have_nonlocal_goto ())
893 /* First adjust our frame pointer to its actual value. It was
894 previously set to the start of the virtual area corresponding to
895 the stacked variables when we branched here and now needs to be
896 adjusted to the actual hardware fp value.
898 Assignments to virtual registers are converted by
899 instantiate_virtual_regs into the corresponding assignment
900 to the underlying register (fp in this case) that makes
901 the original assignment true.
902 So the following insn will actually be decrementing fp by
903 STARTING_FRAME_OFFSET. */
904 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
906 /* Restoring the frame pointer also modifies the hard frame pointer.
907 Mark it used (so that the previous assignment remains live once
908 the frame pointer is eliminated) and clobbered (to represent the
909 implicit update from the assignment). */
910 emit_use (hard_frame_pointer_rtx
);
911 emit_clobber (hard_frame_pointer_rtx
);
914 if (!HARD_FRAME_POINTER_IS_ARG_POINTER
&& fixed_regs
[ARG_POINTER_REGNUM
])
916 /* If the argument pointer can be eliminated in favor of the
917 frame pointer, we don't need to restore it. We assume here
918 that if such an elimination is present, it can always be used.
919 This is the case on all known machines; if we don't make this
920 assumption, we do unnecessary saving on many machines. */
922 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
924 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
925 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
926 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
929 if (i
== ARRAY_SIZE (elim_regs
))
931 /* Now restore our arg pointer from the address at which it
932 was saved in our stack frame. */
933 emit_move_insn (crtl
->args
.internal_arg_pointer
,
934 copy_to_reg (get_arg_pointer_save_area ()));
938 if (receiver_label
!= NULL
&& targetm
.have_builtin_setjmp_receiver ())
939 emit_insn (targetm
.gen_builtin_setjmp_receiver (receiver_label
));
940 else if (targetm
.have_nonlocal_goto_receiver ())
941 emit_insn (targetm
.gen_nonlocal_goto_receiver ());
945 /* We must not allow the code we just generated to be reordered by
946 scheduling. Specifically, the update of the frame pointer must
947 happen immediately, not later. */
948 emit_insn (gen_blockage ());
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
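/* Illustrative usage (assumption, not from the original source):

     if (__builtin_setjmp (buf) == 0)
       do_work ();
     else
       handle_resume ();
     ...
     __builtin_longjmp (buf, 1);

   BUF is an array of five words, and the second argument to
   __builtin_longjmp must be the constant 1 (see the assert below).  */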
957 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
960 rtx_insn
*insn
, *last
;
961 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
963 /* DRAP is needed for stack realign if longjmp is expanded to current
965 if (SUPPORTS_STACK_ALIGNMENT
)
966 crtl
->need_drap
= true;
968 if (setjmp_alias_set
== -1)
969 setjmp_alias_set
= new_alias_set ();
971 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
973 buf_addr
= force_reg (Pmode
, buf_addr
);
975 /* We require that the user must pass a second argument of 1, because
976 that is what builtin_setjmp will return. */
977 gcc_assert (value
== const1_rtx
);
979 last
= get_last_insn ();
980 if (targetm
.have_builtin_longjmp ())
981 emit_insn (targetm
.gen_builtin_longjmp (buf_addr
));
984 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
985 lab
= gen_rtx_MEM (Pmode
, plus_constant (Pmode
, buf_addr
,
986 GET_MODE_SIZE (Pmode
)));
988 stack
= gen_rtx_MEM (sa_mode
, plus_constant (Pmode
, buf_addr
,
989 2 * GET_MODE_SIZE (Pmode
)));
990 set_mem_alias_set (fp
, setjmp_alias_set
);
991 set_mem_alias_set (lab
, setjmp_alias_set
);
992 set_mem_alias_set (stack
, setjmp_alias_set
);
994 /* Pick up FP, label, and SP from the block and jump. This code is
995 from expand_goto in stmt.c; see there for detailed comments. */
996 if (targetm
.have_nonlocal_goto ())
997 /* We have to pass a value to the nonlocal_goto pattern that will
998 get copied into the static_chain pointer, but it does not matter
999 what that value is, because builtin_setjmp does not use it. */
1000 emit_insn (targetm
.gen_nonlocal_goto (value
, lab
, stack
, fp
));
1003 lab
= copy_to_reg (lab
);
1005 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1006 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1008 emit_move_insn (hard_frame_pointer_rtx
, fp
);
1009 emit_stack_restore (SAVE_NONLOCAL
, stack
);
1011 emit_use (hard_frame_pointer_rtx
);
1012 emit_use (stack_pointer_rtx
);
1013 emit_indirect_jump (lab
);
1017 /* Search backwards and mark the jump insn as a non-local goto.
1018 Note that this precludes the use of __builtin_longjmp to a
1019 __builtin_setjmp target in the same function. However, we've
1020 already cautioned the user that these functions are for
1021 internal exception handling use only. */
1022 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1024 gcc_assert (insn
!= last
);
1028 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1031 else if (CALL_P (insn
))
1037 more_const_call_expr_args_p (const const_call_expr_arg_iterator
*iter
)
1039 return (iter
->i
< iter
->n
);
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
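/* Illustrative call (assumption, not from the original source):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   returns true only if EXP has exactly two arguments, both of pointer type;
   an argument that is a literal zero additionally fails the check when the
   whole function or the corresponding parameter is declared nonnull.  */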
1048 validate_arglist (const_tree callexpr
, ...)
1050 enum tree_code code
;
1053 const_call_expr_arg_iterator iter
;
1056 va_start (ap
, callexpr
);
1057 init_const_call_expr_arg_iterator (callexpr
, &iter
);
1059 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1060 tree fn
= CALL_EXPR_FN (callexpr
);
1061 bitmap argmap
= get_nonnull_args (TREE_TYPE (TREE_TYPE (fn
)));
1063 for (unsigned argno
= 1; ; ++argno
)
1065 code
= (enum tree_code
) va_arg (ap
, int);
	  /* This signifies an ellipsis, any further arguments are all ok.  */
1074 /* This signifies an endlink, if no arguments remain, return
1075 true, otherwise return false. */
1076 res
= !more_const_call_expr_args_p (&iter
);
1079 /* The actual argument must be nonnull when either the whole
1080 called function has been declared nonnull, or when the formal
1081 argument corresponding to the actual argument has been. */
1083 && (bitmap_empty_p (argmap
) || bitmap_bit_p (argmap
, argno
)))
1085 arg
= next_const_call_expr_arg (&iter
);
1086 if (!validate_arg (arg
, code
) || integer_zerop (arg
))
1092 /* If no parameters remain or the parameter's code does not
1093 match the specified code, return false. Otherwise continue
1094 checking any remaining arguments. */
1095 arg
= next_const_call_expr_arg (&iter
);
1096 if (!validate_arg (arg
, code
))
  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
1107 BITMAP_FREE (argmap
);
1112 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1113 and the address of the save area. */
1116 expand_builtin_nonlocal_goto (tree exp
)
1118 tree t_label
, t_save_area
;
1119 rtx r_label
, r_save_area
, r_fp
, r_sp
;
1122 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
1125 t_label
= CALL_EXPR_ARG (exp
, 0);
1126 t_save_area
= CALL_EXPR_ARG (exp
, 1);
1128 r_label
= expand_normal (t_label
);
1129 r_label
= convert_memory_address (Pmode
, r_label
);
1130 r_save_area
= expand_normal (t_save_area
);
1131 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
1132 /* Copy the address of the save location to a register just in case it was
1133 based on the frame pointer. */
1134 r_save_area
= copy_to_reg (r_save_area
);
1135 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
1136 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
1137 plus_constant (Pmode
, r_save_area
,
1138 GET_MODE_SIZE (Pmode
)));
1140 crtl
->has_nonlocal_goto
= 1;
1142 /* ??? We no longer need to pass the static chain value, afaik. */
1143 if (targetm
.have_nonlocal_goto ())
1144 emit_insn (targetm
.gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
1147 r_label
= copy_to_reg (r_label
);
1149 emit_clobber (gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
)));
1150 emit_clobber (gen_rtx_MEM (BLKmode
, hard_frame_pointer_rtx
));
1152 /* Restore frame pointer for containing function. */
1153 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
1154 emit_stack_restore (SAVE_NONLOCAL
, r_sp
);
1156 /* USE of hard_frame_pointer_rtx added for consistency;
1157 not clear if really needed. */
1158 emit_use (hard_frame_pointer_rtx
);
1159 emit_use (stack_pointer_rtx
);
1161 /* If the architecture is using a GP register, we must
1162 conservatively assume that the target function makes use of it.
1163 The prologue of functions with nonlocal gotos must therefore
1164 initialize the GP register to the appropriate value, and we
1165 must then make sure that this value is live at the point
1166 of the jump. (Note that this doesn't necessarily apply
1167 to targets with a nonlocal_goto pattern; they are free
1168 to implement it in their own way. Note also that this is
1169 a no-op if the GP register is a global invariant.) */
1170 unsigned regnum
= PIC_OFFSET_TABLE_REGNUM
;
1171 if (regnum
!= INVALID_REGNUM
&& fixed_regs
[regnum
])
1172 emit_use (pic_offset_table_rtx
);
1174 emit_indirect_jump (r_label
);
1177 /* Search backwards to the jump insn and mark it as a
1179 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
1183 add_reg_note (insn
, REG_NON_LOCAL_GOTO
, const0_rtx
);
1186 else if (CALL_P (insn
))
1193 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1194 (not all will be used on all machines) that was passed to __builtin_setjmp.
1195 It updates the stack pointer in that block to the current value. This is
1196 also called directly by the SJLJ exception handling code. */
1199 expand_builtin_update_setjmp_buf (rtx buf_addr
)
1201 machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
1203 = gen_rtx_MEM (sa_mode
,
1206 plus_constant (Pmode
, buf_addr
,
1207 2 * GET_MODE_SIZE (Pmode
))));
1209 emit_stack_save (SAVE_NONLOCAL
, &stack_save
);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
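/* Illustrative call (assumption, not from the original source):
   __builtin_prefetch (p, 0, 3) requests a read prefetch of *p with maximal
   temporal locality; as the code below shows, omitted arguments default to
   rw = 0 and locality = 3.  */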
1217 expand_builtin_prefetch (tree exp
)
1219 tree arg0
, arg1
, arg2
;
1223 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
1226 arg0
= CALL_EXPR_ARG (exp
, 0);
1228 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1229 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1231 nargs
= call_expr_nargs (exp
);
1233 arg1
= CALL_EXPR_ARG (exp
, 1);
1235 arg1
= integer_zero_node
;
1237 arg2
= CALL_EXPR_ARG (exp
, 2);
1239 arg2
= integer_three_node
;
1241 /* Argument 0 is an address. */
1242 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1244 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1245 if (TREE_CODE (arg1
) != INTEGER_CST
)
1247 error ("second argument to %<__builtin_prefetch%> must be a constant");
1248 arg1
= integer_zero_node
;
1250 op1
= expand_normal (arg1
);
1251 /* Argument 1 must be either zero or one. */
1252 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1254 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1259 /* Argument 2 (locality) must be a compile-time constant int. */
1260 if (TREE_CODE (arg2
) != INTEGER_CST
)
1262 error ("third argument to %<__builtin_prefetch%> must be a constant");
1263 arg2
= integer_zero_node
;
1265 op2
= expand_normal (arg2
);
1266 /* Argument 2 must be 0, 1, 2, or 3. */
1267 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1269 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1273 if (targetm
.have_prefetch ())
1275 struct expand_operand ops
[3];
1277 create_address_operand (&ops
[0], op0
);
1278 create_integer_operand (&ops
[1], INTVAL (op1
));
1279 create_integer_operand (&ops
[2], INTVAL (op2
));
1280 if (maybe_expand_insn (targetm
.code_for_prefetch
, 3, ops
))
1284 /* Don't do anything with direct references to volatile memory, but
1285 generate code to handle other side effects. */
1286 if (!MEM_P (op0
) && side_effects_p (op0
))
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */
1296 get_memory_rtx (tree exp
, tree len
)
1298 tree orig_exp
= exp
;
1301 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1302 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1303 if (TREE_CODE (exp
) == SAVE_EXPR
&& !SAVE_EXPR_RESOLVED_P (exp
))
1304 exp
= TREE_OPERAND (exp
, 0);
1306 addr
= expand_expr (orig_exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1307 mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1309 /* Get an expression we can use to find the attributes to assign to MEM.
1310 First remove any nops. */
1311 while (CONVERT_EXPR_P (exp
)
1312 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1313 exp
= TREE_OPERAND (exp
, 0);
1315 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1316 (as builtin stringops may alias with anything). */
1317 exp
= fold_build2 (MEM_REF
,
1318 build_array_type (char_type_node
,
1319 build_range_type (sizetype
,
1320 size_one_node
, len
)),
1321 exp
, build_int_cst (ptr_type_node
, 0));
1323 /* If the MEM_REF has no acceptable address, try to get the base object
1324 from the original address we got, and build an all-aliasing
1325 unknown-sized access to that one. */
1326 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
1327 set_mem_attributes (mem
, exp
, 0);
1328 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
1329 && (exp
= get_base_address (TREE_OPERAND (TREE_OPERAND (exp
, 0),
1332 exp
= build_fold_addr_expr (exp
);
1333 exp
= fold_build2 (MEM_REF
,
1334 build_array_type (char_type_node
,
1335 build_range_type (sizetype
,
1338 exp
, build_int_cst (ptr_type_node
, 0));
1339 set_mem_attributes (mem
, exp
, 0);
1341 set_mem_alias_set (mem
, 0);
1345 /* Built-in functions to perform an untyped call and return. */
1347 #define apply_args_mode \
1348 (this_target_builtins->x_apply_args_mode)
1349 #define apply_result_mode \
1350 (this_target_builtins->x_apply_result_mode)
1352 /* Return the size required for the block returned by __builtin_apply_args,
1353 and initialize apply_args_mode. */
1356 apply_args_size (void)
1358 static int size
= -1;
1363 /* The values computed by this function never change. */
1366 /* The first value is the incoming arg-pointer. */
1367 size
= GET_MODE_SIZE (Pmode
);
1369 /* The second value is the structure value address unless this is
1370 passed as an "invisible" first argument. */
1371 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1372 size
+= GET_MODE_SIZE (Pmode
);
1374 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1375 if (FUNCTION_ARG_REGNO_P (regno
))
1377 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1379 gcc_assert (mode
!= VOIDmode
);
1381 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1382 if (size
% align
!= 0)
1383 size
= CEIL (size
, align
) * align
;
1384 size
+= GET_MODE_SIZE (mode
);
1385 apply_args_mode
[regno
] = mode
;
1389 apply_args_mode
[regno
] = VOIDmode
;
1395 /* Return the size required for the block returned by __builtin_apply,
1396 and initialize apply_result_mode. */
1399 apply_result_size (void)
1401 static int size
= -1;
1405 /* The values computed by this function never change. */
1410 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1411 if (targetm
.calls
.function_value_regno_p (regno
))
1413 mode
= targetm
.calls
.get_raw_result_mode (regno
);
1415 gcc_assert (mode
!= VOIDmode
);
1417 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1418 if (size
% align
!= 0)
1419 size
= CEIL (size
, align
) * align
;
1420 size
+= GET_MODE_SIZE (mode
);
1421 apply_result_mode
[regno
] = mode
;
1424 apply_result_mode
[regno
] = VOIDmode
;
1426 /* Allow targets that use untyped_call and untyped_return to override
1427 the size so that machine-specific information can be stored here. */
1428 #ifdef APPLY_RESULT_SIZE
1429 size
= APPLY_RESULT_SIZE
;
1435 /* Create a vector describing the result block RESULT. If SAVEP is true,
1436 the result block is used to save the values; otherwise it is used to
1437 restore the values. */
1440 result_vector (int savep
, rtx result
)
1442 int regno
, size
, align
, nelts
;
1445 rtx
*savevec
= XALLOCAVEC (rtx
, FIRST_PSEUDO_REGISTER
);
1448 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1449 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1451 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1452 if (size
% align
!= 0)
1453 size
= CEIL (size
, align
) * align
;
1454 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1455 mem
= adjust_address (result
, mode
, size
);
1456 savevec
[nelts
++] = (savep
1457 ? gen_rtx_SET (mem
, reg
)
1458 : gen_rtx_SET (reg
, mem
));
1459 size
+= GET_MODE_SIZE (mode
);
1461 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1464 /* Save the state required to perform an untyped call with the same
1465 arguments as were passed to the current function. */
1468 expand_builtin_apply_args_1 (void)
1471 int size
, align
, regno
;
1473 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1475 /* Create a block where the arg-pointer, structure value address,
1476 and argument registers can be saved. */
1477 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1479 /* Walk past the arg-pointer and structure value address. */
1480 size
= GET_MODE_SIZE (Pmode
);
1481 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1482 size
+= GET_MODE_SIZE (Pmode
);
1484 /* Save each register used in calling a function to the block. */
1485 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1486 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1488 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1489 if (size
% align
!= 0)
1490 size
= CEIL (size
, align
) * align
;
1492 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1494 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1495 size
+= GET_MODE_SIZE (mode
);
1498 /* Save the arg pointer to the block. */
1499 tem
= copy_to_reg (crtl
->args
.internal_arg_pointer
);
1500 /* We need the pointer as the caller actually passed them to us, not
1501 as we might have pretended they were passed. Make sure it's a valid
1502 operand, as emit_move_insn isn't expected to handle a PLUS. */
1503 if (STACK_GROWS_DOWNWARD
)
1505 = force_operand (plus_constant (Pmode
, tem
,
1506 crtl
->args
.pretend_args_size
),
1508 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1510 size
= GET_MODE_SIZE (Pmode
);
1512 /* Save the structure value address unless this is passed as an
1513 "invisible" first argument. */
1514 if (struct_incoming_value
)
1516 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1517 copy_to_reg (struct_incoming_value
));
1518 size
+= GET_MODE_SIZE (Pmode
);
1521 /* Return the address of the block. */
1522 return copy_addr_to_reg (XEXP (registers
, 0));
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   available.  */
1533 expand_builtin_apply_args (void)
1535 /* Don't do __builtin_apply_args more than once in a function.
1536 Save the result of the first call and reuse it. */
1537 if (apply_args_value
!= 0)
1538 return apply_args_value
;
1540 /* When this function is called, it means that registers must be
1541 saved on entry to this function. So we migrate the
1542 call to the first insn of this function. */
1546 temp
= expand_builtin_apply_args_1 ();
1547 rtx_insn
*seq
= get_insns ();
1550 apply_args_value
= temp
;
1552 /* Put the insns after the NOTE that starts the function.
1553 If this is inside a start_sequence, make the outer-level insn
1554 chain current, so the code is placed at the start of the
1555 function. If internal_arg_pointer is a non-virtual pseudo,
1556 it needs to be placed after the function that initializes
1558 push_topmost_sequence ();
1559 if (REG_P (crtl
->args
.internal_arg_pointer
)
1560 && REGNO (crtl
->args
.internal_arg_pointer
) > LAST_VIRTUAL_REGISTER
)
1561 emit_insn_before (seq
, parm_birth_insn
);
1563 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1564 pop_topmost_sequence ();
1569 /* Perform an untyped call and save the state required to perform an
1570 untyped return of whatever value was returned by the given function. */
1573 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1575 int size
, align
, regno
;
1577 rtx incoming_args
, result
, reg
, dest
, src
;
1578 rtx_call_insn
*call_insn
;
1579 rtx old_stack_level
= 0;
1580 rtx call_fusage
= 0;
1581 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1583 arguments
= convert_memory_address (Pmode
, arguments
);
1585 /* Create a block where the return registers can be saved. */
1586 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1588 /* Fetch the arg pointer from the ARGUMENTS block. */
1589 incoming_args
= gen_reg_rtx (Pmode
);
1590 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1591 if (!STACK_GROWS_DOWNWARD
)
1592 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1593 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1595 /* Push a new argument block and copy the arguments. Do not allow
1596 the (potential) memcpy call below to interfere with our stack
1598 do_pending_stack_adjust ();
1601 /* Save the stack with nonlocal if available. */
1602 if (targetm
.have_save_stack_nonlocal ())
1603 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
);
1605 emit_stack_save (SAVE_BLOCK
, &old_stack_level
);
1607 /* Allocate a block of memory onto the stack and copy the memory
1608 arguments to the outgoing arguments address. We can pass TRUE
1609 as the 4th argument because we just saved the stack pointer
1610 and will restore it right after the call. */
1611 allocate_dynamic_stack_space (argsize
, 0, BIGGEST_ALIGNMENT
, true);
1613 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1614 may have already set current_function_calls_alloca to true.
1615 current_function_calls_alloca won't be set if argsize is zero,
1616 so we have to guarantee need_drap is true here. */
1617 if (SUPPORTS_STACK_ALIGNMENT
)
1618 crtl
->need_drap
= true;
1620 dest
= virtual_outgoing_args_rtx
;
1621 if (!STACK_GROWS_DOWNWARD
)
1623 if (CONST_INT_P (argsize
))
1624 dest
= plus_constant (Pmode
, dest
, -INTVAL (argsize
));
1626 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1628 dest
= gen_rtx_MEM (BLKmode
, dest
);
1629 set_mem_align (dest
, PARM_BOUNDARY
);
1630 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1631 set_mem_align (src
, PARM_BOUNDARY
);
1632 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1634 /* Refer to the argument block. */
1636 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1637 set_mem_align (arguments
, PARM_BOUNDARY
);
1639 /* Walk past the arg-pointer and structure value address. */
1640 size
= GET_MODE_SIZE (Pmode
);
1642 size
+= GET_MODE_SIZE (Pmode
);
1644 /* Restore each of the registers previously saved. Make USE insns
1645 for each of these registers for use in making the call. */
1646 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1647 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1649 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1650 if (size
% align
!= 0)
1651 size
= CEIL (size
, align
) * align
;
1652 reg
= gen_rtx_REG (mode
, regno
);
1653 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1654 use_reg (&call_fusage
, reg
);
1655 size
+= GET_MODE_SIZE (mode
);
1658 /* Restore the structure value address unless this is passed as an
1659 "invisible" first argument. */
1660 size
= GET_MODE_SIZE (Pmode
);
1663 rtx value
= gen_reg_rtx (Pmode
);
1664 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1665 emit_move_insn (struct_value
, value
);
1666 if (REG_P (struct_value
))
1667 use_reg (&call_fusage
, struct_value
);
1668 size
+= GET_MODE_SIZE (Pmode
);
1671 /* All arguments and registers used for the call are set up by now! */
1672 function
= prepare_call_address (NULL
, function
, NULL
, &call_fusage
, 0, 0);
1674 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1675 and we don't want to load it into a register as an optimization,
1676 because prepare_call_address already did it if it should be done. */
1677 if (GET_CODE (function
) != SYMBOL_REF
)
1678 function
= memory_address (FUNCTION_MODE
, function
);
1680 /* Generate the actual call instruction and save the return value. */
1681 if (targetm
.have_untyped_call ())
1683 rtx mem
= gen_rtx_MEM (FUNCTION_MODE
, function
);
1684 emit_call_insn (targetm
.gen_untyped_call (mem
, result
,
1685 result_vector (1, result
)));
1687 else if (targetm
.have_call_value ())
1691 /* Locate the unique return register. It is not possible to
1692 express a call that sets more than one return register using
1693 call_value; use untyped_call for that. In fact, untyped_call
1694 only needs to save the return registers in the given block. */
1695 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1696 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1698 gcc_assert (!valreg
); /* have_untyped_call required. */
1700 valreg
= gen_rtx_REG (mode
, regno
);
1703 emit_insn (targetm
.gen_call_value (valreg
,
1704 gen_rtx_MEM (FUNCTION_MODE
, function
),
1705 const0_rtx
, NULL_RTX
, const0_rtx
));
1707 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1712 /* Find the CALL insn we just emitted, and attach the register usage
1714 call_insn
= last_call_insn ();
1715 add_function_usage_to (call_insn
, call_fusage
);
1717 /* Restore the stack. */
1718 if (targetm
.have_save_stack_nonlocal ())
1719 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
);
1721 emit_stack_restore (SAVE_BLOCK
, old_stack_level
);
1722 fixup_args_size_notes (call_insn
, get_last_insn (), 0);
1726 /* Return the address of the result block. */
1727 result
= copy_addr_to_reg (XEXP (result
, 0));
1728 return convert_memory_address (ptr_mode
, result
);
1731 /* Perform an untyped return. */
1734 expand_builtin_return (rtx result
)
1736 int size
, align
, regno
;
1739 rtx_insn
*call_fusage
= 0;
1741 result
= convert_memory_address (Pmode
, result
);
1743 apply_result_size ();
1744 result
= gen_rtx_MEM (BLKmode
, result
);
1746 if (targetm
.have_untyped_return ())
1748 rtx vector
= result_vector (0, result
);
1749 emit_jump_insn (targetm
.gen_untyped_return (result
, vector
));
1754 /* Restore the return value and note that each value is used. */
1756 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1757 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1759 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1760 if (size
% align
!= 0)
1761 size
= CEIL (size
, align
) * align
;
1762 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1763 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1765 push_to_sequence (call_fusage
);
1767 call_fusage
= get_insns ();
1769 size
+= GET_MODE_SIZE (mode
);
1772 /* Put the USE insns before the return. */
1773 emit_insn (call_fusage
);
  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
1777 expand_naked_return ();
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;

/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
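/* Illustrative expansion (not part of the original source): inside the
   switch in mathfn_built_in_2 below, a line such as

     CASE_MATHFN (COPYSIGN)

   turns into the case labels produced by CASE_CFN_COPYSIGN (the double,
   float and long double built-ins, plus the combined internal function
   when one exists) followed by

     fcode = BUILT_IN_COPYSIGN; fcodef = BUILT_IN_COPYSIGNF;
     fcodel = BUILT_IN_COPYSIGNL; break;

   so the three per-type codes are ready for the TYPE_MAIN_VARIANT checks
   at the end of that function.  */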
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  built_in_function fcode, fcodef, fcodel;

  switch (fn)
    {
    CASE_MATHFN (COPYSIGN)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (TGAMMA)

    default:
      return END_BUILTINS;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fcodel;
  else
    return END_BUILTINS;
}
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return NULL_TREE.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
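/* Illustrative usage (not part of the original source): the fallback path
   of expand_builtin_int_roundingfn below does essentially

     tree fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_FLOOR);

   which for a float argument yields the implicit declaration of floorf when
   the front end provides one, and NULL_TREE otherwise (forcing that caller
   to build a plain "floor"/"floorf" decl by hand).  */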
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internal-fn.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
        return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
        {
          tree_pair types = direct_internal_fn_types (ifn, call);
          optimization_type opt_type = bb_optimization_type (gimple_bb (call));
          if (direct_internal_fn_supported_p (ifn, types, opt_type))
            return ifn;
        }
    }
  return IFN_LAST;
}
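/* Illustrative usage (not part of the original source, sketch only): a
   GIMPLE pass holding a one-argument call statement CALL could do

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
         gcall *repl = gimple_build_call_internal (ifn, 1,
                                                   gimple_call_arg (call, 0));
         gimple_call_set_lhs (repl, gimple_call_lhs (call));
         // ...substitute REPL for CALL, having dealt with errno as the
         // comment above requires.
       }

   gimple_build_call_internal, gimple_call_arg and gimple_call_set_lhs are
   existing GIMPLE helpers; the single-argument shape is only for the
   example.  */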
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
                              result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
        builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
        builtin_optab = cos_optab; break;
      default:
        gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
         Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
        {
          int ok;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_SIN):
              ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
              break;
            CASE_FLT_FN (BUILT_IN_COS):
              ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
              break;
            default:
              gcc_unreachable ();
            }
          gcc_assert (ok);
        }
      else
        result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
        op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
          && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
        return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
                                        cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
        gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
                                      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
        fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
        fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
        fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
        gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
         friendliest fallback if the user calls __builtin_cexpi
         without full target C99 function support.  */
      if (fn == NULL_TREE)
        {
          tree fntype;
          const char *name = NULL;

          if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
            name = "cexpf";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
            name = "cexp";
          else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
            name = "cexpl";

          fntype = build_function_type_list (ctype, ctype, NULL_TREE);
          fn = build_fn_decl (name, fntype);
        }

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
                              build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
                              make_tree (TREE_TYPE (arg), op2),
                              make_tree (TREE_TYPE (arg), op1)),
                      target, VOIDmode, EXPAND_NORMAL);
}
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
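/* Illustrative usage (not part of the original source): the fallback code
   in expand_builtin_int_roundingfn below rebuilds the call as

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

   so that the resulting CALL_EXPR is expanded as an ordinary library call
   instead of being folded back into the builtin we just failed to expand
   inline.  */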
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_ICEIL:
        case BUILT_IN_LCEIL:
        case BUILT_IN_LLCEIL:
          name = "ceil";
          break;
        case BUILT_IN_ICEILF:
        case BUILT_IN_LCEILF:
        case BUILT_IN_LLCEILF:
          name = "ceilf";
          break;
        case BUILT_IN_ICEILL:
        case BUILT_IN_LCEILL:
        case BUILT_IN_LLCEILL:
          name = "ceill";
          break;
        case BUILT_IN_IFLOOR:
        case BUILT_IN_LFLOOR:
        case BUILT_IN_LLFLOOR:
          name = "floor";
          break;
        case BUILT_IN_IFLOORF:
        case BUILT_IN_LFLOORF:
        case BUILT_IN_LLFLOORF:
          name = "floorf";
          break;
        case BUILT_IN_IFLOORL:
        case BUILT_IN_LFLOORL:
        case BUILT_IN_LLFLOORL:
          name = "floorl";
          break;
        default:
          gcc_unreachable ();
        }

      fntype = build_function_type_list (TREE_TYPE (arg),
                                         TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
        {
          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return result;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
         targets, (int) round (x) should never be transformed into
         BUILT_IN_IROUND and if __builtin_iround is called directly, emit
         a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
        (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
                                   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode,
                                    op0, mode, op1, mode2);

  return target;
}
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
        return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
         integer, but there are side-effects in src, evaluate
         src for side-effects, then return len.
         E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
         can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
        {
          expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return expand_expr (len, target, target_mode, EXPAND_NORMAL);
        }

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
        return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      FOR_EACH_MODE_FROM (insn_mode, target_mode)
        {
          icode = optab_handler (strlen_optab, insn_mode);
          if (icode != CODE_FOR_nothing)
            break;
        }
      if (insn_mode == VOIDmode)
        return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
         the actual source until we are sure that the expansion will
         not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
         source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
        return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
        {
#ifdef POINTERS_EXTEND_UNSIGNED
          if (GET_MODE (pat) != Pmode)
            pat = convert_to_mode (Pmode, pat,
                                   POINTERS_EXTEND_UNSIGNED);
#endif
          emit_move_insn (src_reg, pat);
        }
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
        emit_insn_after (pat, before_strlen);
      else
        emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
        target = ops[0].value;
      else if (target != 0)
        convert_move (target, ops[0].value, 0);
      else
        target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         scalar_int_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
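/* Illustrative note (not part of the original source): store_by_pieces
   invokes this callback once per chunk it emits.  For a source string
   "hello" and a 4-byte SImode chunk at offset 0 the callback returns the
   CONST_INT whose bytes are 'h', 'e', 'l', 'l' in the target's byte order,
   exactly what c_readstr computes.  */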
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
                      unsigned HOST_WIDE_INT *min_size,
                      unsigned HOST_WIDE_INT *max_size,
                      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
        *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
        *min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
        *probable_max_size = *max_size
          = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
        *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
        range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
        {
          if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
            *min_size = min.to_uhwi ();
          if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
            *probable_max_size = *max_size = max.to_uhwi ();
        }
      else if (range_type == VR_ANTI_RANGE)
        {
          /* Anti range 0...N lets us determine minimal size to N+1.  */
          if (min == 0)
            {
              if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
                *min_size = max.to_uhwi () + 1;
            }
          /* A comparison guarding the copy can produce an anti range
             allowing negative values of N.  We still can use the
             information and make a guess that N is not negative.  */
          else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
            *probable_max_size = min.to_uhwi () - 1;
        }
    }
  gcc_checking_assert (*max_size <=
                       (unsigned HOST_WIDE_INT)
                          GET_MODE_MASK (GET_MODE (len_rtx)));
}
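/* Illustrative example (not part of the original source; the names are
   hypothetical): for

     void f (unsigned n) { if (n < 32) memcpy (dst, src, n); }

   the SSA range of N at the call is [0, 31], so this function reports
   *min_size = 0 and *max_size = *probable_max_size = 31, which the block
   move expanders can use to pick a short inline copy sequence.  */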
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow.  Arguments other than
   EXP may be null.  When non-null, the arguments have the following
   meaning:
   SIZE is the user-supplied size argument to the function (such as in
   memcpy(d, s, SIZE) or strncpy(d, s, SIZE)).  It specifies the exact
   number of bytes to write.
   MAXLEN is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the number
   of bytes to write.
   SRC is the source string (such as in strcpy(d, s)) when the expression
   EXP is a string function call (as opposed to a memory call like memcpy).
   As an exception, SRC can also be an integer denoting the precomputed
   size of the source string or object (for functions like memcpy).
   OBJSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
   OBJSIZE)).

   When SIZE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   If the call is successfully verified as safe from buffer overflow
   the function returns true, otherwise false.  */

static bool
check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
{
2993 /* The size of the largest object is half the address space, or
2994 SSIZE_MAX. (This is way too permissive.) */
2995 tree maxobjsize
= TYPE_MAX_VALUE (ssizetype
);
2997 tree slen
= NULL_TREE
;
2999 tree range
[2] = { NULL_TREE
, NULL_TREE
};
3001 /* Set to true when the exact number of bytes written by a string
3002 function like strcpy is not known and the only thing that is
3003 known is that it must be at least one (for the terminating nul). */
3004 bool at_least_one
= false;
3007 /* SRC is normally a pointer to string but as a special case
3008 it can be an integer denoting the length of a string. */
3009 if (POINTER_TYPE_P (TREE_TYPE (src
)))
3011 /* Try to determine the range of lengths the source string
3012 refers to. If it can be determined and is less than
3013 the upper bound given by MAXLEN add one to it for
3014 the terminating nul. Otherwise, set it to one for
3015 the same reason, or to MAXLEN as appropriate. */
3016 get_range_strlen (src
, range
);
3017 if (range
[0] && (!maxlen
|| TREE_CODE (maxlen
) == INTEGER_CST
))
3019 if (maxlen
&& tree_int_cst_le (maxlen
, range
[0]))
3020 range
[0] = range
[1] = maxlen
;
3022 range
[0] = fold_build2 (PLUS_EXPR
, size_type_node
,
3023 range
[0], size_one_node
);
3025 if (maxlen
&& tree_int_cst_le (maxlen
, range
[1]))
3027 else if (!integer_all_onesp (range
[1]))
3028 range
[1] = fold_build2 (PLUS_EXPR
, size_type_node
,
3029 range
[1], size_one_node
);
3035 at_least_one
= true;
3036 slen
= size_one_node
;
3043 if (!size
&& !maxlen
)
3045 /* When the only available piece of data is the object size
3046 there is nothing to do. */
3050 /* Otherwise, when the length of the source sequence is known
3051 (as with with strlen), set SIZE to it. */
3057 objsize
= maxobjsize
;
3059 /* The SIZE is exact if it's non-null, constant, and in range of
3060 unsigned HOST_WIDE_INT. */
3061 bool exactsize
= size
&& tree_fits_uhwi_p (size
);
3064 get_size_range (size
, range
);
3066 /* First check the number of bytes to be written against the maximum
3068 if (range
[0] && tree_int_cst_lt (maxobjsize
, range
[0]))
3070 location_t loc
= tree_nonartificial_location (exp
);
3071 loc
= expansion_point_location_if_in_system_header (loc
);
3073 if (range
[0] == range
[1])
3074 warning_at (loc
, opt
,
3075 "%K%qD specified size %E "
3076 "exceeds maximum object size %E",
3077 exp
, get_callee_fndecl (exp
), range
[0], maxobjsize
);
3079 warning_at (loc
, opt
,
3080 "%K%qD specified size between %E and %E "
3081 "exceeds maximum object size %E",
3082 exp
, get_callee_fndecl (exp
),
3083 range
[0], range
[1], maxobjsize
);
3087 /* Next check the number of bytes to be written against the destination
3089 if (range
[0] || !exactsize
|| integer_all_onesp (size
))
3092 && ((tree_fits_uhwi_p (objsize
)
3093 && tree_int_cst_lt (objsize
, range
[0]))
3094 || (tree_fits_uhwi_p (size
)
3095 && tree_int_cst_lt (size
, range
[0]))))
3097 location_t loc
= tree_nonartificial_location (exp
);
3098 loc
= expansion_point_location_if_in_system_header (loc
);
3100 if (size
== slen
&& at_least_one
)
3102 /* This is a call to strcpy with a destination of 0 size
3103 and a source of unknown length. The call will write
3104 at least one byte past the end of the destination. */
3105 warning_at (loc
, opt
,
3106 "%K%qD writing %E or more bytes into a region "
3107 "of size %E overflows the destination",
3108 exp
, get_callee_fndecl (exp
), range
[0], objsize
);
3110 else if (tree_int_cst_equal (range
[0], range
[1]))
3111 warning_at (loc
, opt
,
3112 (integer_onep (range
[0])
3113 ? G_("%K%qD writing %E byte into a region "
3114 "of size %E overflows the destination")
3115 : G_("%K%qD writing %E bytes into a region "
3116 "of size %E overflows the destination")),
3117 exp
, get_callee_fndecl (exp
), range
[0], objsize
);
3118 else if (tree_int_cst_sign_bit (range
[1]))
3120 /* Avoid printing the upper bound if it's invalid. */
3121 warning_at (loc
, opt
,
3122 "%K%qD writing %E or more bytes into a region "
3123 "of size %E overflows the destination",
3124 exp
, get_callee_fndecl (exp
), range
[0], objsize
);
3127 warning_at (loc
, opt
,
3128 "%K%qD writing between %E and %E bytes into "
3129 "a region of size %E overflows the destination",
3130 exp
, get_callee_fndecl (exp
), range
[0], range
[1],
3133 /* Return error when an overflow has been detected. */
3138 /* Check the maximum length of the source sequence against the size
3139 of the destination object if known, or against the maximum size
3143 get_size_range (maxlen
, range
);
3145 if (range
[0] && objsize
&& tree_fits_uhwi_p (objsize
))
3147 location_t loc
= tree_nonartificial_location (exp
);
3148 loc
= expansion_point_location_if_in_system_header (loc
);
3150 if (tree_int_cst_lt (maxobjsize
, range
[0]))
3152 /* Warn about crazy big sizes first since that's more
3153 likely to be meaningful than saying that the bound
3154 is greater than the object size if both are big. */
3155 if (range
[0] == range
[1])
3156 warning_at (loc
, opt
,
3157 "%K%qD specified bound %E "
3158 "exceeds maximum object size %E",
3159 exp
, get_callee_fndecl (exp
),
3160 range
[0], maxobjsize
);
3162 warning_at (loc
, opt
,
3163 "%K%qD specified bound between %E and %E "
3164 "exceeds maximum object size %E",
3165 exp
, get_callee_fndecl (exp
),
3166 range
[0], range
[1], maxobjsize
);
3171 if (objsize
!= maxobjsize
&& tree_int_cst_lt (objsize
, range
[0]))
3173 if (tree_int_cst_equal (range
[0], range
[1]))
3174 warning_at (loc
, opt
,
3175 "%K%qD specified bound %E "
3176 "exceeds destination size %E",
3177 exp
, get_callee_fndecl (exp
),
3180 warning_at (loc
, opt
,
3181 "%K%qD specified bound between %E and %E "
3182 "exceeds destination size %E",
3183 exp
, get_callee_fndecl (exp
),
3184 range
[0], range
[1], objsize
);
3193 && tree_int_cst_lt (slen
, range
[0]))
3195 location_t loc
= tree_nonartificial_location (exp
);
3197 if (tree_int_cst_equal (range
[0], range
[1]))
3198 warning_at (loc
, opt
,
3199 (tree_int_cst_equal (range
[0], integer_one_node
)
3200 ? G_("%K%qD reading %E byte from a region of size %E")
3201 : G_("%K%qD reading %E bytes from a region of size %E")),
3202 exp
, get_callee_fndecl (exp
), range
[0], slen
);
3203 else if (tree_int_cst_sign_bit (range
[1]))
3205 /* Avoid printing the upper bound if it's invalid. */
3206 warning_at (loc
, opt
,
3207 "%K%qD reading %E or more bytes from a region "
3209 exp
, get_callee_fndecl (exp
), range
[0], slen
);
3212 warning_at (loc
, opt
,
3213 "%K%qD reading between %E and %E bytes from a region "
3215 exp
, get_callee_fndecl (exp
), range
[0], range
[1], slen
);
/* Helper to compute the size of the object referenced by the DEST
   expression which must be of pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   the size of the object if successful or NULL when the size cannot
   be determined.  */

static tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;
  if (compute_builtin_object_size (dest, ostype & 3, &size))
    return build_int_cst (sizetype, size);

  return NULL_TREE;
}
/* Helper to determine and check the sizes of the source and the destination
   of calls to __builtin_{bzero,memcpy,mempcpy,memset}.  EXP is the
   call expression, DEST is the destination argument, SRC is the source
   argument or null, and LEN is the number of bytes.  Use Object Size type-0
   regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
   (no overflow or invalid sizes), false otherwise.  */

static bool
check_memop_sizes (tree exp, tree dest, tree src, tree size)
{
  if (!warn_stringop_overflow)
    return true;

  /* For functions like memset and memcpy that operate on raw memory
     try to determine the size of the largest source and destination
     object using type-0 Object Size regardless of the object size
     type specified by the option.  */
  tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
  tree dstsize = compute_objsize (dest, 0);

  return check_sizes (OPT_Wstringop_overflow_, exp,
                      size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
}
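/* Illustrative example (not part of the original source; the names are
   hypothetical): for

     char buf[4];
     mempcpy (buf, src, 8);

   compute_objsize (buf, 0) is 4 while the length argument is 8, so
   check_sizes issues the -Wstringop-overflow "writing 8 bytes into a
   region of size 4" warning and returns false, which expand_builtin_mempcpy
   below uses to skip the inline expansion.  */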
/* Validate memchr arguments without performing any expansion.
   Return NULL_RTX.  */

static rtx
expand_builtin_memchr (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size
     of the object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      check_sizes (OPT_Wstringop_overflow_,
                   exp, len, /*maxlen=*/NULL_TREE,
                   size, /*objsize=*/NULL_TREE);
    }

  return NULL_RTX;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_sizes (exp, dest, src, len);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
                                          /*endp=*/ 0);
}
/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_sizes (exp, dest, src, len);

  return NULL_RTX;
}
3329 /* Expand an instrumented call EXP to the memcpy builtin.
3330 Return NULL_RTX if we failed, the caller should emit a normal call,
3331 otherwise try to get the result in TARGET, if convenient (and in
3332 mode MODE if that's convenient). */
3335 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3337 if (!validate_arglist (exp
,
3338 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3339 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3340 INTEGER_TYPE
, VOID_TYPE
))
3344 tree dest
= CALL_EXPR_ARG (exp
, 0);
3345 tree src
= CALL_EXPR_ARG (exp
, 2);
3346 tree len
= CALL_EXPR_ARG (exp
, 4);
3347 rtx res
= expand_builtin_memory_copy_args (dest
, src
, len
, target
, exp
,
3350 /* Return src bounds with the result. */
3353 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3354 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3355 res
= chkp_join_splitted_slot (res
, bnd
);
3361 /* Expand a call EXP to the mempcpy builtin.
3362 Return NULL_RTX if we failed; the caller should emit a normal call,
3363 otherwise try to get the result in TARGET, if convenient (and in
3364 mode MODE if that's convenient). If ENDP is 0 return the
3365 destination pointer, if ENDP is 1 return the end pointer ala
3366 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3370 expand_builtin_mempcpy (tree exp
, rtx target
)
3372 if (!validate_arglist (exp
,
3373 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3376 tree dest
= CALL_EXPR_ARG (exp
, 0);
3377 tree src
= CALL_EXPR_ARG (exp
, 1);
3378 tree len
= CALL_EXPR_ARG (exp
, 2);
3380 /* Avoid expanding mempcpy into memcpy when the call is determined
3381 to overflow the buffer. This also prevents the same overflow
3382 from being diagnosed again when expanding memcpy. */
3383 if (!check_memop_sizes (exp
, dest
, src
, len
))
3386 return expand_builtin_mempcpy_args (dest
, src
, len
,
3387 target
, exp
, /*endp=*/ 1);
3390 /* Expand an instrumented call EXP to the mempcpy builtin.
3391 Return NULL_RTX if we failed, the caller should emit a normal call,
3392 otherwise try to get the result in TARGET, if convenient (and in
3393 mode MODE if that's convenient). */
3396 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
)
3398 if (!validate_arglist (exp
,
3399 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3400 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3401 INTEGER_TYPE
, VOID_TYPE
))
3405 tree dest
= CALL_EXPR_ARG (exp
, 0);
3406 tree src
= CALL_EXPR_ARG (exp
, 2);
3407 tree len
= CALL_EXPR_ARG (exp
, 4);
3408 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3411 /* Return src bounds with the result. */
3414 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3415 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3416 res
= chkp_join_splitted_slot (res
, bnd
);
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.
   If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */
3431 expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
3432 rtx target
, tree exp
, int endp
)
3434 const char *src_str
;
3435 unsigned int src_align
= get_pointer_alignment (src
);
3436 unsigned int dest_align
= get_pointer_alignment (dest
);
3437 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3438 HOST_WIDE_INT expected_size
= -1;
3439 unsigned int expected_align
= 0;
3440 unsigned HOST_WIDE_INT min_size
;
3441 unsigned HOST_WIDE_INT max_size
;
3442 unsigned HOST_WIDE_INT probable_max_size
;
3444 /* If DEST is not a pointer type, call the normal function. */
3445 if (dest_align
== 0)
3448 /* If either SRC is not a pointer type, don't do this
3449 operation in-line. */
3453 if (currently_expanding_gimple_stmt
)
3454 stringop_block_profile (currently_expanding_gimple_stmt
,
3455 &expected_align
, &expected_size
);
3457 if (expected_align
< dest_align
)
3458 expected_align
= dest_align
;
3459 dest_mem
= get_memory_rtx (dest
, len
);
3460 set_mem_align (dest_mem
, dest_align
);
3461 len_rtx
= expand_normal (len
);
3462 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3463 &probable_max_size
);
3464 src_str
= c_getstr (src
);
3466 /* If SRC is a string constant and block move would be done
3467 by pieces, we can avoid loading the string from memory
3468 and only stored the computed constants. */
3470 && CONST_INT_P (len_rtx
)
3471 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3472 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3473 CONST_CAST (char *, src_str
),
3476 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3477 builtin_memcpy_read_str
,
3478 CONST_CAST (char *, src_str
),
3479 dest_align
, false, endp
);
3480 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3481 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3485 src_mem
= get_memory_rtx (src
, len
);
3486 set_mem_align (src_mem
, src_align
);
3488 /* Copy word part most expediently. */
3489 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3490 CALL_EXPR_TAILCALL (exp
)
3491 && (endp
== 0 || target
== const0_rtx
)
3492 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3493 expected_align
, expected_size
,
3494 min_size
, max_size
, probable_max_size
);
3498 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3499 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3502 if (endp
&& target
!= const0_rtx
)
3504 dest_addr
= gen_rtx_PLUS (ptr_mode
, dest_addr
, len_rtx
);
3505 /* stpcpy pointer to last byte. */
3507 dest_addr
= gen_rtx_MINUS (ptr_mode
, dest_addr
, const1_rtx
);
3514 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3515 rtx target
, tree orig_exp
, int endp
)
3517 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, orig_exp
,
3521 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3522 we failed, the caller should emit a normal call, otherwise try to
3523 get the result in TARGET, if convenient. If ENDP is 0 return the
3524 destination pointer, if ENDP is 1 return the end pointer ala
3525 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3529 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3531 struct expand_operand ops
[3];
3535 if (!targetm
.have_movstr ())
3538 dest_mem
= get_memory_rtx (dest
, NULL
);
3539 src_mem
= get_memory_rtx (src
, NULL
);
3542 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3543 dest_mem
= replace_equiv_address (dest_mem
, target
);
3546 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3547 create_fixed_operand (&ops
[1], dest_mem
);
3548 create_fixed_operand (&ops
[2], src_mem
);
3549 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3552 if (endp
&& target
!= const0_rtx
)
3554 target
= ops
[0].value
;
3555 /* movstr is supposed to set end to the address of the NUL
3556 terminator. If the caller requested a mempcpy-like return value,
3560 rtx tem
= plus_constant (GET_MODE (target
),
3561 gen_lowpart (GET_MODE (target
), target
), 1);
3562 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */
3573 expand_builtin_strcat (tree exp
, rtx
)
3575 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
)
3576 || !warn_stringop_overflow
)
3579 tree dest
= CALL_EXPR_ARG (exp
, 0);
3580 tree src
= CALL_EXPR_ARG (exp
, 1);
  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */
3587 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3589 check_sizes (OPT_Wstringop_overflow_
,
3590 exp
, /*size=*/NULL_TREE
, /*maxlen=*/NULL_TREE
, src
, destsize
);
3595 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3596 NULL_RTX if we failed the caller should emit a normal call, otherwise
3597 try to get the result in TARGET, if convenient (and in mode MODE if that's
3601 expand_builtin_strcpy (tree exp
, rtx target
)
3603 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3606 tree dest
= CALL_EXPR_ARG (exp
, 0);
3607 tree src
= CALL_EXPR_ARG (exp
, 1);
3609 if (warn_stringop_overflow
)
3611 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3612 check_sizes (OPT_Wstringop_overflow_
,
3613 exp
, /*size=*/NULL_TREE
, /*maxlen=*/NULL_TREE
, src
, destsize
);
3616 return expand_builtin_strcpy_args (dest
, src
, target
);
3619 /* Helper function to do the actual work for expand_builtin_strcpy. The
3620 arguments to the builtin_strcpy call DEST and SRC are broken out
3621 so that this can also be called without constructing an actual CALL_EXPR.
3622 The other arguments and return value are the same as for
3623 expand_builtin_strcpy. */
3626 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3628 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3631 /* Expand a call EXP to the stpcpy builtin.
3632 Return NULL_RTX if we failed the caller should emit a normal call,
3633 otherwise try to get the result in TARGET, if convenient (and in
3634 mode MODE if that's convenient). */
3637 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3640 location_t loc
= EXPR_LOCATION (exp
);
3642 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3645 dst
= CALL_EXPR_ARG (exp
, 0);
3646 src
= CALL_EXPR_ARG (exp
, 1);
3648 if (warn_stringop_overflow
)
3650 tree destsize
= compute_objsize (dst
, warn_stringop_overflow
- 1);
3651 check_sizes (OPT_Wstringop_overflow_
,
3652 exp
, /*size=*/NULL_TREE
, /*maxlen=*/NULL_TREE
, src
, destsize
);
3655 /* If return value is ignored, transform stpcpy into strcpy. */
3656 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3658 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3659 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3660 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3667 /* Ensure we get an actual string whose length can be evaluated at
3668 compile-time, not an expression containing a string. This is
3669 because the latter will potentially produce pessimized code
3670 when used to produce the return value. */
3671 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3672 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3674 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3675 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3676 target
, exp
, /*endp=*/2);
3681 if (TREE_CODE (len
) == INTEGER_CST
)
3683 rtx len_rtx
= expand_normal (len
);
3685 if (CONST_INT_P (len_rtx
))
3687 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3693 if (mode
!= VOIDmode
)
3694 target
= gen_reg_rtx (mode
);
3696 target
= gen_reg_rtx (GET_MODE (ret
));
3698 if (GET_MODE (target
) != GET_MODE (ret
))
3699 ret
= gen_lowpart (GET_MODE (target
), ret
);
3701 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3702 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3710 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3714 /* Check a call EXP to the stpncpy built-in for validity.
3715 Return NULL_RTX on both success and failure. */
3718 expand_builtin_stpncpy (tree exp
, rtx
)
3720 if (!validate_arglist (exp
,
3721 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
3722 || !warn_stringop_overflow
)
3725 /* The source and destination of the call. */
3726 tree dest
= CALL_EXPR_ARG (exp
, 0);
3727 tree src
= CALL_EXPR_ARG (exp
, 1);
3729 /* The exact number of bytes to write (not the maximum). */
3730 tree len
= CALL_EXPR_ARG (exp
, 2);
3732 /* The size of the destination object. */
3733 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3735 check_sizes (OPT_Wstringop_overflow_
,
3736 exp
, len
, /*maxlen=*/NULL_TREE
, src
, destsize
);
3741 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3742 bytes from constant string DATA + OFFSET and return it as target
3746 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3747 scalar_int_mode mode
)
3749 const char *str
= (const char *) data
;
3751 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3754 return c_readstr (str
+ offset
, mode
);
3757 /* Helper to check the sizes of sequences and the destination of calls
3758 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3759 success (no overflow or invalid sizes), false otherwise. */
3762 check_strncat_sizes (tree exp
, tree objsize
)
3764 tree dest
= CALL_EXPR_ARG (exp
, 0);
3765 tree src
= CALL_EXPR_ARG (exp
, 1);
3766 tree maxlen
= CALL_EXPR_ARG (exp
, 2);
3768 /* Try to determine the range of lengths that the source expression
3771 get_range_strlen (src
, lenrange
);
3773 /* Try to verify that the destination is big enough for the shortest
3776 if (!objsize
&& warn_stringop_overflow
)
3778 /* If it hasn't been provided by __strncat_chk, try to determine
3779 the size of the destination object into which the source is
3781 objsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3784 /* Add one for the terminating nul. */
3785 tree srclen
= (lenrange
[0]
3786 ? fold_build2 (PLUS_EXPR
, size_type_node
, lenrange
[0],
3790 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3791 nul so the specified upper bound should never be equal to (or greater
3792 than) the size of the destination. */
3793 if (tree_fits_uhwi_p (maxlen
) && tree_fits_uhwi_p (objsize
)
3794 && tree_int_cst_equal (objsize
, maxlen
))
3796 location_t loc
= tree_nonartificial_location (exp
);
3797 loc
= expansion_point_location_if_in_system_header (loc
);
3799 warning_at (loc
, OPT_Wstringop_overflow_
,
3800 "%K%qD specified bound %E equals destination size",
3801 exp
, get_callee_fndecl (exp
), maxlen
);
3807 || (maxlen
&& tree_fits_uhwi_p (maxlen
)
3808 && tree_fits_uhwi_p (srclen
)
3809 && tree_int_cst_lt (maxlen
, srclen
)))
3812 /* The number of bytes to write is LEN but check_sizes will also
3813 check SRCLEN if LEN's value isn't known. */
3814 return check_sizes (OPT_Wstringop_overflow_
,
3815 exp
, /*size=*/NULL_TREE
, maxlen
, srclen
, objsize
);
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */
3823 expand_builtin_strncat (tree exp
, rtx
)
3825 if (!validate_arglist (exp
,
3826 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
3827 || !warn_stringop_overflow
)
3830 tree dest
= CALL_EXPR_ARG (exp
, 0);
3831 tree src
= CALL_EXPR_ARG (exp
, 1);
3832 /* The upper bound on the number of bytes to write. */
3833 tree maxlen
= CALL_EXPR_ARG (exp
, 2);
3834 /* The length of the source sequence. */
3835 tree slen
= c_strlen (src
, 1);
3837 /* Try to determine the range of lengths that the source expression
3841 lenrange
[0] = lenrange
[1] = slen
;
3843 get_range_strlen (src
, lenrange
);
3845 /* Try to verify that the destination is big enough for the shortest
3846 string. First try to determine the size of the destination object
3847 into which the source is being copied. */
3848 tree destsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3850 /* Add one for the terminating nul. */
3851 tree srclen
= (lenrange
[0]
3852 ? fold_build2 (PLUS_EXPR
, size_type_node
, lenrange
[0],
3856 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3857 nul so the specified upper bound should never be equal to (or greater
3858 than) the size of the destination. */
3859 if (tree_fits_uhwi_p (maxlen
) && tree_fits_uhwi_p (destsize
)
3860 && tree_int_cst_equal (destsize
, maxlen
))
3862 location_t loc
= tree_nonartificial_location (exp
);
3863 loc
= expansion_point_location_if_in_system_header (loc
);
3865 warning_at (loc
, OPT_Wstringop_overflow_
,
3866 "%K%qD specified bound %E equals destination size",
3867 exp
, get_callee_fndecl (exp
), maxlen
);
3873 || (maxlen
&& tree_fits_uhwi_p (maxlen
)
3874 && tree_fits_uhwi_p (srclen
)
3875 && tree_int_cst_lt (maxlen
, srclen
)))
3878 /* The number of bytes to write is LEN but check_sizes will also
3879 check SRCLEN if LEN's value isn't known. */
3880 check_sizes (OPT_Wstringop_overflow_
,
3881 exp
, /*size=*/NULL_TREE
, maxlen
, srclen
, destsize
);
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

expand_builtin_strncpy (tree exp, rtx target)
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
	  tree destsize = compute_objsize (dest,
					   warn_stringop_overflow - 1);

	  /* The number of bytes to write is LEN but check_sizes will also
	     check SLEN if LEN's value isn't known.  */
	  check_sizes (OPT_Wstringop_overflow_,
		       exp, len, /*maxlen=*/NULL_TREE, src, destsize);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 scalar_int_mode mode)
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			scalar_int_mode mode)
  size = GET_MODE_SIZE (mode);

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
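
/* Illustration only (not part of the GCC sources): the callback above
   replicates one byte across a word by multiplying it by a constant whose
   bytes are all 1 (0x01010101 for a 4-byte mode).  A minimal user-level
   sketch of the same trick, with a made-up helper name:

     #include <stdint.h>

     static uint32_t
     replicate_byte (uint8_t b)
     {
       return (uint32_t) b * 0x01010101u;   // 0xab -> 0xabababab
     }

   which is what lets a memset of a non-constant value be stored a word at
   a time instead of byte by byte.  */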
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

expand_builtin_memset (tree exp, rtx target, machine_mode mode)
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_sizes (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode MODE
   if that's convenient).  */

expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 2);
  tree len = CALL_EXPR_ARG (exp, 3);
  rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

  /* Return src bounds with the result.  */
  rtx bnd = force_reg (targetm.chkp_bound_mode (),
		       expand_normal (CALL_EXPR_ARG (exp, 1)));
  res = chkp_join_splitted_slot (res, bnd);
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
  enum built_in_function fcode;
  machine_mode val_mode;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */

      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,

	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);

  if (target_char_cast (val, &c))

  if (tree_fits_uhwi_p (len)
      && can_store_by_pieces (tree_to_uhwi (len),
			      builtin_memset_read_str, &c, dest_align,
    store_by_pieces (dest_mem, tree_to_uhwi (len),
		     builtin_memset_read_str, &c, dest_align, true, 0);
  else if (!set_storage_via_setmem (dest_mem, len_rtx,
				    gen_int_mode (c, val_mode),
				    dest_align, expected_align,
				    expected_size, min_size, max_size,

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,

      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);

  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,

  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

expand_builtin_bzero (tree exp)
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  check_memop_sizes (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
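
/* Illustration only (not part of the GCC sources): the rewrite above means a
   source-level call such as

     bzero (buf, n);

   is expanded exactly as

     memset (buf, 0, (size_t) n);

   would be, and only if neither can be expanded inline does the generated
   code fall back to calling bzero itself.  */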
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  if (warn_stringop_overflow)
      tree size = compute_objsize (arg1, 0);
      if (check_sizes (OPT_Wstringop_overflow_,
		       exp, len, /*maxlen=*/NULL_TREE,
		       size, /*objsize=*/NULL_TREE))
	  size = compute_objsize (arg2, 0);
	  check_sizes (OPT_Wstringop_overflow_,
		       exp, len, /*maxlen=*/NULL_TREE,
		       size, /*objsize=*/NULL_TREE);

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));

  by_pieces_constfn constfn = NULL;

  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				     TREE_TYPE (len), target,
				     CONST_CAST (char *, src_str));

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (result) == mode)

      convert_move (target, result, 0);

  return convert_to_mode (mode, result, 0);
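
/* Illustration only (not part of the GCC sources): RESULT_EQ corresponds to
   uses where only equality matters, e.g.

     if (memcmp (a, b, n) == 0)
       ...

   There the expansion may return any nonzero value for "different", whereas
   a plain memcmp result must also order the two buffers by its sign.  */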
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
      rtx arg1_rtx, arg2_rtx;

      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      rtx result = NULL_RTX;

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

      /* Try to call cmpstrsi.  */
      if (cmpstr_icode != CODE_FOR_nothing)
	result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
				MIN (arg1_align, arg2_align));

      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!result && cmpstrn_icode != CODE_FOR_nothing)
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant
	     lengths, use the smaller.  */
	  else if (TREE_SIDE_EFFECTS (len1))
	  else if (TREE_SIDE_EFFECTS (len2))
	  else if (TREE_CODE (len1) != INTEGER_CST)
	  else if (TREE_CODE (len2) != INTEGER_CST)
	  else if (tree_int_cst_lt (len1, len2))

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (len && !TREE_SIDE_EFFECTS (len))
	      arg3_rtx = expand_normal (len);
	      result = expand_cmpstrn_or_cmpmem
		(cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
		 arg3_rtx, MIN (arg1_align, arg2_align));

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)

	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  fndecl = get_callee_fndecl (exp);
  fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode != CODE_FOR_nothing)
      tree len, len1, len2, len3;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;

      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      len3 = fold_convert_loc (loc, sizetype, arg3);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  If neither string is constant length,
	 use the given length argument.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */
      else if (TREE_SIDE_EFFECTS (len1))
      else if (TREE_SIDE_EFFECTS (len2))
      else if (TREE_CODE (len1) != INTEGER_CST)
      else if (TREE_CODE (len2) != INTEGER_CST)
      else if (tree_int_cst_lt (len1, len2))

      /* If we are not using the given length, we must incorporate it here.
	 The actual new length parameter will be MIN(len,arg3) in this case.  */
	len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
					 arg2_rtx, TREE_TYPE (len), arg3_rtx,
					 MIN (arg1_align, arg2_align));

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)

	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  fndecl = get_callee_fndecl (exp);
  fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

expand_builtin_saveregs (void)
  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();
/* Expand a call to __builtin_next_arg.  */

expand_builtin_next_arg (void)
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);

      tree pt = build_pointer_type (vatype);

      if (! TREE_SIDE_EFFECTS (valist))

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
/* The "standard" definition of va_list is void*.  */

std_build_builtin_va_list (void)
  return ptr_type_node;

/* The "standard" abi va_list is va_list_type_node.  */

std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
  return va_list_type_node;

/* The "standard" type of va_list is va_list_type_node.  */

std_canonical_va_list_type (tree type)
  wtype = va_list_type_node;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);

  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

std_expand_builtin_va_start (tree valist, rtx nextarg)
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),

/* Expand EXP, a call to __builtin_va_start.  */

expand_builtin_va_start (tree exp)
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
      error_at (loc, "too few arguments to function %<va_start%>");

  if (fold_builtin_next_arg (exp, true))

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
    std_expand_builtin_va_start (valist, nextarg);
/* Expand EXP, a call to __builtin_va_end.  */

expand_builtin_va_end (tree exp)
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

expand_builtin_va_copy (tree exp)
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
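
/* Illustration only (not part of the GCC sources): the two branches above
   match the two common va_list layouts.  Where va_list is a pointer (or
   other scalar), va_copy degenerates into an assignment; where it is an
   array of records (e.g. the one-element array used by the x86-64 ABI), the
   whole record must be block-copied, which is why

     va_list a, b;
     va_copy (b, a);

   cannot simply be spelled "b = a" in stdarg.h.  */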
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

expand_builtin_frame_address (tree fndecl, tree exp)
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
      error ("invalid argument to %qD", fndecl);

      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
	  warning (0, "unsupported argument to %qD", fndecl);

	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)

	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
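
/* Illustration only (not part of the GCC sources): the -Wframe-address
   warning above is about walking past the current frame, e.g.

     void *callers_frame = __builtin_frame_address (1);   (warns)
     void *own_ret_addr  = __builtin_return_address (0);  (no warning)

   since nothing guarantees that frames beyond the current one exist or can
   be reached safely on every target.  */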
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

expand_builtin_alloca (tree exp)
  tree fndecl = get_callee_fndecl (exp);
  bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
			    == BUILT_IN_ALLOCA_WITH_ALIGN);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if ((alloca_with_align && !warn_vla_limit)
      || (!alloca_with_align && !warn_alloca_limit))
      /* -Walloca-larger-than and -Wvla-larger-than settings override
	 the more general -Walloc-size-larger-than so unless either of
	 the former options is specified check the alloca arguments for
	 overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);
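
/* Illustration only (not part of the GCC sources): BUILT_IN_ALLOCA_WITH_ALIGN
   is what the gimplifier uses for variable-length arrays, so

     void f (int n) { char buf[n]; }

   reaches this expander as a __builtin_alloca_with_align call (the second
   argument being the required alignment, in bits here), while an explicit
   alloca (n) uses the plain form; that distinction is what lets
   -Wvla-larger-than= and -Walloca-larger-than= warn separately.  */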
/* Emit a __asan_allocas_unpoison call for EXP.  Replace the second argument
   of the call with virtual_stack_dynamic_rtx because in the asan pass we emit
   a dummy value for that parameter, relying on this function to perform the
   change.  See the motivation in the comment for
   handle_builtin_stack_restore.  */

expand_asan_emit_allocas_unpoison (tree exp)
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
/* Expand a call to a bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
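
/* Illustration only (not part of the GCC sources): the unop expanded above
   is plain byte reversal, e.g.

     __builtin_bswap32 (0x11223344u) == 0x44332211u

   so targets with a bswap instruction need no library call.  */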
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
/* Expand a call to __builtin_expect.  We just return our argument,
   as the builtin_expect semantics should already have been applied by the
   tree branch prediction pass.  */

expand_builtin_expect (tree exp, rtx target)
  if (call_expr_nargs (exp) < 2)
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
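
/* Illustration only (not part of the GCC sources): by the time RTL expansion
   runs, a use such as

     if (__builtin_expect (err != 0, 0))
       handle_error ();

   has already had its branch probability recorded on the GIMPLE conditional,
   so all that is left to do here is to hand back the first argument.  */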
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument, as the builtin_assume_aligned semantics should already have
   been applied.  */

expand_builtin_assume_aligned (tree exp, rtx target)
  if (call_expr_nargs (exp) < 2)
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
expand_builtin_trap (void)
  if (targetm.have_trap ())
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

expand_builtin_unreachable (void)
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
/* Expand a call to __builtin___clear_cache.  */

expand_builtin___clear_cache (tree exp)
  if (!targetm.code_for_clear_cache)
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
#endif /* CLEAR_INSN_CACHE */

  /* We have a "clear_cache" insn, and it will handle everything.  */

  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");

  if (targetm.have_clear_cache ())
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

round_trampoline_addr (rtx tramp)
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);
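
/* Illustration only (not part of the GCC sources): the add-then-mask
   sequence above is the usual way of rounding a pointer up to a power-of-two
   boundary.  A hypothetical source-level equivalent:

     #include <stdint.h>

     static uintptr_t
     align_up (uintptr_t p, uintptr_t align)   // align is a power of two
     {
       return (p + align - 1) & -align;
     }

   with align playing the role of TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT.  */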
expand_builtin_init_trampoline (tree exp, bool onstack)
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);

      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
expand_builtin_adjust_trampoline (tree exp)
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

expand_builtin_init_descriptor (tree exp)
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);
/* Expand a call to the builtin descriptor adjustment routine.  */

expand_builtin_adjust_descriptor (tree exp)
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
			 targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

expand_builtin_signbit (tree exp, rtx target)
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  enum insn_code icode;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
      delete_insns_since (last);

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;

      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);

      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);

      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
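
/* Illustration only (not part of the GCC sources): for IEEE binary64 the
   sign is bit 63, so on a 64-bit target the shift-and-mask fallback above
   amounts to roughly

     #include <stdint.h>
     #include <string.h>

     static int
     my_signbit (double x)        // hypothetical stand-in for signbit ()
     {
       uint64_t bits;
       memcpy (&bits, &x, sizeof bits);
       return (bits >> 63) & 1;
     }

   when no dedicated signbit instruction is available.  */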
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     the compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
      id = get_identifier ("__gcov_fork");

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
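
/* Illustration only (not part of the GCC sources): the _1/_2/_4/_8/_16
   variants are numbered consecutively, so FCODE_DIFF is the log2 of the
   access size in bytes.  For __sync_fetch_and_add_4, for instance,

     fcode_diff = BUILT_IN_SYNC_FETCH_AND_ADD_4
		  - BUILT_IN_SYNC_FETCH_AND_ADD_1 = 2

   and BITS_PER_UNIT << 2 is 32 bits, i.e. SImode on a typical target.  */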
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

get_builtin_sync_mem (tree loc, machine_mode mode)
  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

expand_expr_force_mode (tree exp, machine_mode mode)
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
  rtx old_val, new_val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)

  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

expand_builtin_sync_lock_release (machine_mode mode, tree exp)
  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
  unsigned HOST_WIDE_INT val;
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
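
/* Illustration only (not part of the GCC sources): the checks above apply to
   the memory-model argument of calls such as

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   A non-constant model is treated as __ATOMIC_SEQ_CST, an out-of-range value
   draws -Winvalid-memory-model, and (because of PR 59448) __ATOMIC_CONSUME
   is promoted to __ATOMIC_ACQUIRE.  */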
/* Expand the __atomic_exchange intrinsic:
	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
5725 /* Expand the __atomic_compare_exchange intrinsic:
5726 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5727 TYPE desired, BOOL weak,
5728 enum memmodel success,
5729 enum memmodel failure)
5730 EXP is the CALL_EXPR.
5731 TARGET is an optional place for us to store the results. */
5734 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
5737 rtx expect
, desired
, mem
, oldval
;
5738 rtx_code_label
*label
;
5739 enum memmodel success
, failure
;
5743 = expansion_point_location_if_in_system_header (input_location
);
5745 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5746 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5748 if (failure
> success
)
5750 warning_at (loc
, OPT_Winvalid_memory_model
,
5751 "failure memory model cannot be stronger than success "
5752 "memory model for %<__atomic_compare_exchange%>");
5753 success
= MEMMODEL_SEQ_CST
;
5756 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
5758 warning_at (loc
, OPT_Winvalid_memory_model
,
5759 "invalid failure memory model for "
5760 "%<__atomic_compare_exchange%>");
5761 failure
= MEMMODEL_SEQ_CST
;
5762 success
= MEMMODEL_SEQ_CST
;
5766 if (!flag_inline_atomics
)
5769 /* Expand the operands. */
5770 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5772 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5773 expect
= convert_memory_address (Pmode
, expect
);
5774 expect
= gen_rtx_MEM (mode
, expect
);
5775 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5777 weak
= CALL_EXPR_ARG (exp
, 3);
5779 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
5782 if (target
== const0_rtx
)
5785 /* Lest the rtl backend create a race condition with an imporoper store
5786 to memory, always create a new pseudo for OLDVAL. */
5789 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
5790 is_weak
, success
, failure
))
5793 /* Conditionally store back to EXPECT, lest we create a race condition
5794 with an improper store to memory. */
5795 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5796 the normal case where EXPECT is totally private, i.e. a register. At
5797 which point the store can be unconditional. */
5798 label
= gen_label_rtx ();
5799 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
,
5800 GET_MODE (target
), 1, label
);
5801 emit_move_insn (expect
, oldval
);
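/* Illustrative sketch (user-level code, not part of GCC itself): a strong
   compare-and-swap handled by the expander above.  Note the constraints it
   enforces: the failure model may not be stronger than the success model and
   may not be RELEASE or ACQ_REL.  */
static inline _Bool
example_compare_exchange (int *obj, int *expected, int desired)
{
  return __atomic_compare_exchange_n (obj, expected, desired, /* weak */ 0,
				      __ATOMIC_SEQ_CST, __ATOMIC_ACQUIRE);
}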
5807 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5808 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5809 call. The weak parameter must be dropped to match the expected parameter
5810 list and the expected argument changed from value to pointer to memory
5814 expand_ifn_atomic_compare_exchange_into_call (gcall
*call
, machine_mode mode
)
5817 vec
<tree
, va_gc
> *vec
;
5820 vec
->quick_push (gimple_call_arg (call
, 0));
5821 tree expected
= gimple_call_arg (call
, 1);
5822 rtx x
= assign_stack_temp_for_type (mode
, GET_MODE_SIZE (mode
),
5823 TREE_TYPE (expected
));
5824 rtx expd
= expand_expr (expected
, x
, mode
, EXPAND_NORMAL
);
5826 emit_move_insn (x
, expd
);
5827 tree v
= make_tree (TREE_TYPE (expected
), x
);
5828 vec
->quick_push (build1 (ADDR_EXPR
,
5829 build_pointer_type (TREE_TYPE (expected
)), v
));
5830 vec
->quick_push (gimple_call_arg (call
, 2));
5831 /* Skip the boolean weak parameter. */
5832 for (z
= 4; z
< 6; z
++)
5833 vec
->quick_push (gimple_call_arg (call
, z
));
5834 built_in_function fncode
5835 = (built_in_function
) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5836 + exact_log2 (GET_MODE_SIZE (mode
)));
5837 tree fndecl
= builtin_decl_explicit (fncode
);
5838 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fndecl
)),
5840 tree exp
= build_call_vec (boolean_type_node
, fn
, vec
);
5841 tree lhs
= gimple_call_lhs (call
);
5842 rtx boolret
= expand_call (exp
, NULL_RTX
, lhs
== NULL_TREE
);
5845 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5846 if (GET_MODE (boolret
) != mode
)
5847 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
5848 x
= force_reg (mode
, x
);
5849 write_complex_part (target
, boolret
, true);
5850 write_complex_part (target
, x
, false);
5854 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5857 expand_ifn_atomic_compare_exchange (gcall
*call
)
5859 int size
= tree_to_shwi (gimple_call_arg (call
, 3)) & 255;
5860 gcc_assert (size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
5861 machine_mode mode
= int_mode_for_size (BITS_PER_UNIT
* size
, 0).require ();
5862 rtx expect
, desired
, mem
, oldval
, boolret
;
5863 enum memmodel success
, failure
;
5867 = expansion_point_location_if_in_system_header (gimple_location (call
));
5869 success
= get_memmodel (gimple_call_arg (call
, 4));
5870 failure
= get_memmodel (gimple_call_arg (call
, 5));
5872 if (failure
> success
)
5874 warning_at (loc
, OPT_Winvalid_memory_model
,
5875 "failure memory model cannot be stronger than success "
5876 "memory model for %<__atomic_compare_exchange%>");
5877 success
= MEMMODEL_SEQ_CST
;
5880 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
5882 warning_at (loc
, OPT_Winvalid_memory_model
,
5883 "invalid failure memory model for "
5884 "%<__atomic_compare_exchange%>");
5885 failure
= MEMMODEL_SEQ_CST
;
5886 success
= MEMMODEL_SEQ_CST
;
5889 if (!flag_inline_atomics
)
5891 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
5895 /* Expand the operands. */
5896 mem
= get_builtin_sync_mem (gimple_call_arg (call
, 0), mode
);
5898 expect
= expand_expr_force_mode (gimple_call_arg (call
, 1), mode
);
5899 desired
= expand_expr_force_mode (gimple_call_arg (call
, 2), mode
);
5901 is_weak
= (tree_to_shwi (gimple_call_arg (call
, 3)) & 256) != 0;
5906 if (!expand_atomic_compare_and_swap (&boolret
, &oldval
, mem
, expect
, desired
,
5907 is_weak
, success
, failure
))
5909 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
5913 lhs
= gimple_call_lhs (call
);
5916 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5917 if (GET_MODE (boolret
) != mode
)
5918 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
5919 write_complex_part (target
, boolret
, true);
5920 write_complex_part (target
, oldval
, false);
5924 /* Expand the __atomic_load intrinsic:
5925 TYPE __atomic_load (TYPE *object, enum memmodel)
5926 EXP is the CALL_EXPR.
5927 TARGET is an optional place for us to store the results. */
5930 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
5933 enum memmodel model
;
5935 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5936 if (is_mm_release (model
) || is_mm_acq_rel (model
))
5939 = expansion_point_location_if_in_system_header (input_location
);
5940 warning_at (loc
, OPT_Winvalid_memory_model
,
5941 "invalid memory model for %<__atomic_load%>");
5942 model
= MEMMODEL_SEQ_CST
;
5945 if (!flag_inline_atomics
)
5948 /* Expand the operand. */
5949 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5951 return expand_atomic_load (target
, mem
, model
);
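/* Illustrative sketch (user-level code, not part of GCC itself): a load
   handled by the expander above.  RELEASE and ACQ_REL models are rejected
   with -Winvalid-memory-model and demoted to SEQ_CST.  */
static inline int
example_atomic_load (int *obj)
{
  return __atomic_load_n (obj, __ATOMIC_ACQUIRE);
}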
5955 /* Expand the __atomic_store intrinsic:
5956 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5957 EXP is the CALL_EXPR.
5958 TARGET is an optional place for us to store the results. */
5961 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
5964 enum memmodel model
;
5966 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5967 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
5968 || is_mm_release (model
)))
5971 = expansion_point_location_if_in_system_header (input_location
);
5972 warning_at (loc
, OPT_Winvalid_memory_model
,
5973 "invalid memory model for %<__atomic_store%>");
5974 model
= MEMMODEL_SEQ_CST
;
5977 if (!flag_inline_atomics
)
5980 /* Expand the operands. */
5981 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5982 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5984 return expand_atomic_store (mem
, val
, model
, false);
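/* Illustrative sketch (user-level code, not part of GCC itself): a store
   handled by the expander above.  Only RELAXED, RELEASE and SEQ_CST are
   accepted here; anything else is warned about and demoted to SEQ_CST.  */
static inline void
example_atomic_store (int *obj, int val)
{
  __atomic_store_n (obj, val, __ATOMIC_RELEASE);
}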
5987 /* Expand the __atomic_fetch_XXX intrinsic:
5988 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5989 EXP is the CALL_EXPR.
5990 TARGET is an optional place for us to store the results.
5991 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5992 FETCH_AFTER is true if returning the result of the operation.
5993 FETCH_AFTER is false if returning the value before the operation.
5994 IGNORE is true if the result is not used.
5995 EXT_CALL is the correct builtin for an external call if this cannot be
5996 resolved to an instruction sequence. */
5999 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
6000 enum rtx_code code
, bool fetch_after
,
6001 bool ignore
, enum built_in_function ext_call
)
6004 enum memmodel model
;
6008 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
6010 /* Expand the operands. */
6011 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6012 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
6014 /* Only try generating instructions if inlining is turned on. */
6015 if (flag_inline_atomics
)
6017 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
6022 /* Return if a different routine isn't needed for the library call. */
6023 if (ext_call
== BUILT_IN_NONE
)
6026 /* Change the call to the specified function. */
6027 fndecl
= get_callee_fndecl (exp
);
6028 addr
= CALL_EXPR_FN (exp
);
6031 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
6032 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
6034   /* If we will emit code after the call, the call cannot be a tail call.
6035      If it is emitted as a tail call, a barrier is emitted after it, and
6036      then all trailing code is removed.  */
6038 CALL_EXPR_TAILCALL (exp
) = 0;
6040 /* Expand the call here so we can emit trailing code. */
6041 ret
= expand_call (exp
, target
, ignore
);
6043 /* Replace the original function just in case it matters. */
6044 TREE_OPERAND (addr
, 0) = fndecl
;
6046 /* Then issue the arithmetic correction to return the right result. */
6051 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
6053 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
6056 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
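/* Illustrative sketch (user-level code, not part of GCC itself): the two
   flavours the expander above handles.  When an __atomic_OP_fetch call has
   to fall back to the external __atomic_fetch_OP routine, the code above
   re-applies the operation so the two forms stay equivalent, i.e.
   __atomic_add_fetch (p, n, m) == __atomic_fetch_add (p, n, m) + n.  */
static inline int
example_fetch_forms (int *counter)
{
  int before = __atomic_fetch_add (counter, 1, __ATOMIC_RELAXED); /* old value */
  int after = __atomic_add_fetch (counter, 1, __ATOMIC_RELAXED);  /* new value */
  return after - before;
}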
6062 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6065 expand_ifn_atomic_bit_test_and (gcall
*call
)
6067 tree ptr
= gimple_call_arg (call
, 0);
6068 tree bit
= gimple_call_arg (call
, 1);
6069 tree flag
= gimple_call_arg (call
, 2);
6070 tree lhs
= gimple_call_lhs (call
);
6071 enum memmodel model
= MEMMODEL_SYNC_SEQ_CST
;
6072 machine_mode mode
= TYPE_MODE (TREE_TYPE (flag
));
6075 struct expand_operand ops
[5];
6077 gcc_assert (flag_inline_atomics
);
6079 if (gimple_call_num_args (call
) == 4)
6080 model
= get_memmodel (gimple_call_arg (call
, 3));
6082 rtx mem
= get_builtin_sync_mem (ptr
, mode
);
6083 rtx val
= expand_expr_force_mode (bit
, mode
);
6085 switch (gimple_call_internal_fn (call
))
6087 case IFN_ATOMIC_BIT_TEST_AND_SET
:
6089 optab
= atomic_bit_test_and_set_optab
;
6091 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT
:
6093 optab
= atomic_bit_test_and_complement_optab
;
6095 case IFN_ATOMIC_BIT_TEST_AND_RESET
:
6097 optab
= atomic_bit_test_and_reset_optab
;
6103 if (lhs
== NULL_TREE
)
6105 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6106 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6108 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6109 expand_atomic_fetch_op (const0_rtx
, mem
, val
, code
, model
, false);
6113 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6114 enum insn_code icode
= direct_optab_handler (optab
, mode
);
6115 gcc_assert (icode
!= CODE_FOR_nothing
);
6116 create_output_operand (&ops
[0], target
, mode
);
6117 create_fixed_operand (&ops
[1], mem
);
6118 create_convert_operand_to (&ops
[2], val
, mode
, true);
6119 create_integer_operand (&ops
[3], model
);
6120 create_integer_operand (&ops
[4], integer_onep (flag
));
6121 if (maybe_expand_insn (icode
, 5, ops
))
6125 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6126 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6129 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6130 rtx result
= expand_atomic_fetch_op (gen_reg_rtx (mode
), mem
, val
,
6131 code
, model
, false);
6132 if (integer_onep (flag
))
6134 result
= expand_simple_binop (mode
, ASHIFTRT
, result
, bitval
,
6135 NULL_RTX
, true, OPTAB_DIRECT
);
6136 result
= expand_simple_binop (mode
, AND
, result
, const1_rtx
, target
,
6137 true, OPTAB_DIRECT
);
6140 result
= expand_simple_binop (mode
, AND
, result
, maskval
, target
, true,
6142 if (result
!= target
)
6143 emit_move_insn (target
, result
);
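/* Illustrative sketch (user-level code, not part of GCC itself): the kind of
   source-level idiom that is recognized earlier in the pipeline and turned
   into IFN_ATOMIC_BIT_TEST_AND_SET, which the function above then expands
   through atomic_bit_test_and_set_optab when the target provides it.  */
static inline int
example_bit_test_and_set (unsigned int *flags, int bit)
{
  return (__atomic_fetch_or (flags, 1u << bit, __ATOMIC_SEQ_CST) >> bit) & 1;
}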
6146 /* Expand an atomic clear operation.
6147 void _atomic_clear (BOOL *obj, enum memmodel)
6148 EXP is the call expression. */
6151 expand_builtin_atomic_clear (tree exp
)
6155 enum memmodel model
;
6157 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
6158 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6159 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6161 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
6164 = expansion_point_location_if_in_system_header (input_location
);
6165 warning_at (loc
, OPT_Winvalid_memory_model
,
6166 "invalid memory model for %<__atomic_store%>");
6167 model
= MEMMODEL_SEQ_CST
;
6170 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6171 Failing that, a store is issued by __atomic_store. The only way this can
6172 fail is if the bool type is larger than a word size. Unlikely, but
6173 handle it anyway for completeness. Assume a single threaded model since
6174 there is no atomic support in this case, and no barriers are required. */
6175 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
6177 emit_move_insn (mem
, const0_rtx
);
6181 /* Expand an atomic test_and_set operation.
6182 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6183 EXP is the call expression. */
6186 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
6189 enum memmodel model
;
6192 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
6193 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6194 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6196 return expand_atomic_test_and_set (target
, mem
, model
);
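/* Illustrative sketch (user-level code, not part of GCC itself): a minimal
   spin lock built from the two expanders above.  */
static inline void
example_spin_lock (_Bool *lock)
{
  /* Loop until the previous value was clear, i.e. we took the lock.  */
  while (__atomic_test_and_set (lock, __ATOMIC_ACQUIRE))
    ;
}

static inline void
example_spin_unlock (_Bool *lock)
{
  __atomic_clear (lock, __ATOMIC_RELEASE);
}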
6200 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6201 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6204 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
6208 unsigned int mode_align
, type_align
;
6210 if (TREE_CODE (arg0
) != INTEGER_CST
)
6213 /* We need a corresponding integer mode for the access to be lock-free. */
6214 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
6215 if (!int_mode_for_size (size
, 0).exists (&mode
))
6216 return boolean_false_node
;
6218 mode_align
= GET_MODE_ALIGNMENT (mode
);
6220 if (TREE_CODE (arg1
) == INTEGER_CST
)
6222 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
6224 /* Either this argument is null, or it's a fake pointer encoding
6225 the alignment of the object. */
6226 val
= least_bit_hwi (val
);
6227 val
*= BITS_PER_UNIT
;
6229 if (val
== 0 || mode_align
< val
)
6230 type_align
= mode_align
;
6236 tree ttype
= TREE_TYPE (arg1
);
6238 /* This function is usually invoked and folded immediately by the front
6239 end before anything else has a chance to look at it. The pointer
6240 parameter at this point is usually cast to a void *, so check for that
6241 and look past the cast. */
6242 if (CONVERT_EXPR_P (arg1
)
6243 && POINTER_TYPE_P (ttype
)
6244 && VOID_TYPE_P (TREE_TYPE (ttype
))
6245 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
6246 arg1
= TREE_OPERAND (arg1
, 0);
6248 ttype
= TREE_TYPE (arg1
);
6249 gcc_assert (POINTER_TYPE_P (ttype
));
6251 /* Get the underlying type of the object. */
6252 ttype
= TREE_TYPE (ttype
);
6253 type_align
= TYPE_ALIGN (ttype
);
6256 /* If the object has smaller alignment, the lock free routines cannot
6258 if (type_align
< mode_align
)
6259 return boolean_false_node
;
6261 /* Check if a compare_and_swap pattern exists for the mode which represents
6262 the required size. The pattern is not allowed to fail, so the existence
6263 of the pattern indicates support is present. Also require that an
6264 atomic load exists for the required size. */
6265 if (can_compare_and_swap_p (mode
, true) && can_atomic_load_p (mode
))
6266 return boolean_true_node
;
6268 return boolean_false_node
;
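/* Illustrative sketch (user-level code, not part of GCC itself): a call the
   folder above can resolve at compile time.  A null object pointer means
   "assume the typical alignment for an object of this size".  */
static inline int
example_always_lock_free (void)
{
  /* On targets with 4-byte compare-and-swap and atomic-load support this
     folds to 1.  */
  return __atomic_always_lock_free (sizeof (int), 0);
}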
6271 /* Return true if the parameters to call EXP represent an object which will
6272 always generate lock free instructions. The first argument represents the
6273 size of the object, and the second parameter is a pointer to the object
6274 itself. If NULL is passed for the object, then the result is based on
6275 typical alignment for an object of the specified size. Otherwise return
6279 expand_builtin_atomic_always_lock_free (tree exp
)
6282 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6283 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6285 if (TREE_CODE (arg0
) != INTEGER_CST
)
6287 error ("non-constant argument 1 to __atomic_always_lock_free");
6291 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
6292 if (size
== boolean_true_node
)
6297 /* Return a one or zero if it can be determined that object ARG1 of size ARG
6298 is lock free on this architecture. */
6301 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
6303 if (!flag_inline_atomics
)
6306 /* If it isn't always lock free, don't generate a result. */
6307 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
6308 return boolean_true_node
;
6313 /* Return true if the parameters to call EXP represent an object which will
6314 always generate lock free instructions. The first argument represents the
6315 size of the object, and the second parameter is a pointer to the object
6316 itself. If NULL is passed for the object, then the result is based on
6317 typical alignment for an object of the specified size. Otherwise return
6321 expand_builtin_atomic_is_lock_free (tree exp
)
6324 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6325 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6327 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
6329 error ("non-integer argument 1 to __atomic_is_lock_free");
6333 if (!flag_inline_atomics
)
6336 /* If the value is known at compile time, return the RTX for it. */
6337 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
6338 if (size
== boolean_true_node
)
6344 /* Expand the __atomic_thread_fence intrinsic:
6345 void __atomic_thread_fence (enum memmodel)
6346 EXP is the CALL_EXPR. */
6349 expand_builtin_atomic_thread_fence (tree exp
)
6351 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
6352 expand_mem_thread_fence (model
);
6355 /* Expand the __atomic_signal_fence intrinsic:
6356 void __atomic_signal_fence (enum memmodel)
6357 EXP is the CALL_EXPR. */
6360 expand_builtin_atomic_signal_fence (tree exp
)
6362 enum memmodel model
= get_memmodel (CALL_EXPR_ARG (exp
, 0));
6363 expand_mem_signal_fence (model
);
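/* Illustrative sketch (user-level code, not part of GCC itself): the two
   fence builtins expanded above.  A thread fence orders memory accesses
   against other threads; a signal fence only constrains the compiler, for
   code synchronizing with a signal handler in the same thread.  */
static inline void
example_fences (void)
{
  __atomic_thread_fence (__ATOMIC_SEQ_CST);
  __atomic_signal_fence (__ATOMIC_SEQ_CST);
}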
6366 /* Expand the __sync_synchronize intrinsic. */
6369 expand_builtin_sync_synchronize (void)
6371 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST
);
6375 expand_builtin_thread_pointer (tree exp
, rtx target
)
6377 enum insn_code icode
;
6378 if (!validate_arglist (exp
, VOID_TYPE
))
6380 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
6381 if (icode
!= CODE_FOR_nothing
)
6383 struct expand_operand op
;
6384       /* If the target is not suitable then create a new target. */
6385 if (target
== NULL_RTX
6387 || GET_MODE (target
) != Pmode
)
6388 target
= gen_reg_rtx (Pmode
);
6389 create_output_operand (&op
, target
, Pmode
);
6390 expand_insn (icode
, 1, &op
);
6393 error ("__builtin_thread_pointer is not supported on this target");
6398 expand_builtin_set_thread_pointer (tree exp
)
6400 enum insn_code icode
;
6401 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6403 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
6404 if (icode
!= CODE_FOR_nothing
)
6406 struct expand_operand op
;
6407 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
6408 Pmode
, EXPAND_NORMAL
);
6409 create_input_operand (&op
, val
, Pmode
);
6410 expand_insn (icode
, 1, &op
);
6413 error ("__builtin_set_thread_pointer is not supported on this target");
6417 /* Emit code to restore the current value of stack. */
6420 expand_stack_restore (tree var
)
6423 rtx sa
= expand_normal (var
);
6425 sa
= convert_memory_address (Pmode
, sa
);
6427 prev
= get_last_insn ();
6428 emit_stack_restore (SAVE_BLOCK
, sa
);
6430 record_new_stack_level ();
6432 fixup_args_size_notes (prev
, get_last_insn (), 0);
6435 /* Emit code to save the current value of stack. */
6438 expand_stack_save (void)
6442 emit_stack_save (SAVE_BLOCK
, &ret
);
6447 /* Expand an expression EXP that calls a built-in function,
6448 with result going to TARGET if that's convenient
6449 (and in mode MODE if that's convenient).
6450 SUBTARGET may be used as the target for computing one of EXP's operands.
6451 IGNORE is nonzero if the value is to be ignored. */
6454 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
6457 tree fndecl
= get_callee_fndecl (exp
);
6458 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6459 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
6462 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6463 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
6465 /* When ASan is enabled, we don't want to expand some memory/string
6466 builtins and rely on libsanitizer's hooks. This allows us to avoid
6467 redundant checks and be sure, that possible overflow will be detected
6470 if ((flag_sanitize
& SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
6471 return expand_call (exp
, target
, ignore
);
6473 /* When not optimizing, generate calls to library functions for a certain
6476 && !called_as_built_in (fndecl
)
6477 && fcode
!= BUILT_IN_FORK
6478 && fcode
!= BUILT_IN_EXECL
6479 && fcode
!= BUILT_IN_EXECV
6480 && fcode
!= BUILT_IN_EXECLP
6481 && fcode
!= BUILT_IN_EXECLE
6482 && fcode
!= BUILT_IN_EXECVP
6483 && fcode
!= BUILT_IN_EXECVE
6484 && fcode
!= BUILT_IN_ALLOCA
6485 && fcode
!= BUILT_IN_ALLOCA_WITH_ALIGN
6486 && fcode
!= BUILT_IN_FREE
6487 && fcode
!= BUILT_IN_CHKP_SET_PTR_BOUNDS
6488 && fcode
!= BUILT_IN_CHKP_INIT_PTR_BOUNDS
6489 && fcode
!= BUILT_IN_CHKP_NULL_PTR_BOUNDS
6490 && fcode
!= BUILT_IN_CHKP_COPY_PTR_BOUNDS
6491 && fcode
!= BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6492 && fcode
!= BUILT_IN_CHKP_STORE_PTR_BOUNDS
6493 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6494 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6495 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6496 && fcode
!= BUILT_IN_CHKP_GET_PTR_LBOUND
6497 && fcode
!= BUILT_IN_CHKP_GET_PTR_UBOUND
6498 && fcode
!= BUILT_IN_CHKP_BNDRET
)
6499 return expand_call (exp
, target
, ignore
);
6501 /* The built-in function expanders test for target == const0_rtx
6502 to determine whether the function's result will be ignored. */
6504 target
= const0_rtx
;
6506 /* If the result of a pure or const built-in function is ignored, and
6507 none of its arguments are volatile, we can avoid expanding the
6508 built-in call and just evaluate the arguments for side-effects. */
6509 if (target
== const0_rtx
6510 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
6511 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
6513 bool volatilep
= false;
6515 call_expr_arg_iterator iter
;
6517 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6518 if (TREE_THIS_VOLATILE (arg
))
6526 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6527 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6532 /* expand_builtin_with_bounds is supposed to be used for
6533 instrumented builtin calls. */
6534 gcc_assert (!CALL_WITH_BOUNDS_P (exp
));
6538 CASE_FLT_FN (BUILT_IN_FABS
):
6539 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
6540 case BUILT_IN_FABSD32
:
6541 case BUILT_IN_FABSD64
:
6542 case BUILT_IN_FABSD128
:
6543 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6548 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6549 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
6550 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6555 /* Just do a normal library call if we were unable to fold
6557 CASE_FLT_FN (BUILT_IN_CABS
):
6560 CASE_FLT_FN (BUILT_IN_FMA
):
6561 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
6566 CASE_FLT_FN (BUILT_IN_ILOGB
):
6567 if (! flag_unsafe_math_optimizations
)
6570 CASE_FLT_FN (BUILT_IN_ISINF
):
6571 CASE_FLT_FN (BUILT_IN_FINITE
):
6572 case BUILT_IN_ISFINITE
:
6573 case BUILT_IN_ISNORMAL
:
6574 target
= expand_builtin_interclass_mathfn (exp
, target
);
6579 CASE_FLT_FN (BUILT_IN_ICEIL
):
6580 CASE_FLT_FN (BUILT_IN_LCEIL
):
6581 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6582 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6583 CASE_FLT_FN (BUILT_IN_IFLOOR
):
6584 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6585 target
= expand_builtin_int_roundingfn (exp
, target
);
6590 CASE_FLT_FN (BUILT_IN_IRINT
):
6591 CASE_FLT_FN (BUILT_IN_LRINT
):
6592 CASE_FLT_FN (BUILT_IN_LLRINT
):
6593 CASE_FLT_FN (BUILT_IN_IROUND
):
6594 CASE_FLT_FN (BUILT_IN_LROUND
):
6595 CASE_FLT_FN (BUILT_IN_LLROUND
):
6596 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6601 CASE_FLT_FN (BUILT_IN_POWI
):
6602 target
= expand_builtin_powi (exp
, target
);
6607 CASE_FLT_FN (BUILT_IN_CEXPI
):
6608 target
= expand_builtin_cexpi (exp
, target
);
6609 gcc_assert (target
);
6612 CASE_FLT_FN (BUILT_IN_SIN
):
6613 CASE_FLT_FN (BUILT_IN_COS
):
6614 if (! flag_unsafe_math_optimizations
)
6616 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6621 CASE_FLT_FN (BUILT_IN_SINCOS
):
6622 if (! flag_unsafe_math_optimizations
)
6624 target
= expand_builtin_sincos (exp
);
6629 case BUILT_IN_APPLY_ARGS
:
6630 return expand_builtin_apply_args ();
6632 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6633 FUNCTION with a copy of the parameters described by
6634 ARGUMENTS, and ARGSIZE. It returns a block of memory
6635 allocated on the stack into which is stored all the registers
6636 that might possibly be used for returning the result of a
6637 function. ARGUMENTS is the value returned by
6638 __builtin_apply_args. ARGSIZE is the number of bytes of
6639 arguments that must be copied. ??? How should this value be
6640 computed? We'll also need a safe worst case value for varargs
6642 case BUILT_IN_APPLY
:
6643 if (!validate_arglist (exp
, POINTER_TYPE
,
6644 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6645 && !validate_arglist (exp
, REFERENCE_TYPE
,
6646 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6652 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6653 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6654 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6656 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6659 /* __builtin_return (RESULT) causes the function to return the
6660 value described by RESULT. RESULT is address of the block of
6661 memory returned by __builtin_apply. */
6662 case BUILT_IN_RETURN
:
6663 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6664 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6667 case BUILT_IN_SAVEREGS
:
6668 return expand_builtin_saveregs ();
6670 case BUILT_IN_VA_ARG_PACK
:
6671 /* All valid uses of __builtin_va_arg_pack () are removed during
6673 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6676 case BUILT_IN_VA_ARG_PACK_LEN
:
6677 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6679 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6682 /* Return the address of the first anonymous stack arg. */
6683 case BUILT_IN_NEXT_ARG
:
6684 if (fold_builtin_next_arg (exp
, false))
6686 return expand_builtin_next_arg ();
6688 case BUILT_IN_CLEAR_CACHE
:
6689 target
= expand_builtin___clear_cache (exp
);
6694 case BUILT_IN_CLASSIFY_TYPE
:
6695 return expand_builtin_classify_type (exp
);
6697 case BUILT_IN_CONSTANT_P
:
6700 case BUILT_IN_FRAME_ADDRESS
:
6701 case BUILT_IN_RETURN_ADDRESS
:
6702 return expand_builtin_frame_address (fndecl
, exp
);
6704 /* Returns the address of the area where the structure is returned.
6706 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6707 if (call_expr_nargs (exp
) != 0
6708 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6709 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6712 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6714 case BUILT_IN_ALLOCA
:
6715 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6716 target
= expand_builtin_alloca (exp
);
6721 case BUILT_IN_ASAN_ALLOCAS_UNPOISON
:
6722 return expand_asan_emit_allocas_unpoison (exp
);
6724 case BUILT_IN_STACK_SAVE
:
6725 return expand_stack_save ();
6727 case BUILT_IN_STACK_RESTORE
:
6728 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6731 case BUILT_IN_BSWAP16
:
6732 case BUILT_IN_BSWAP32
:
6733 case BUILT_IN_BSWAP64
:
6734 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6739 CASE_INT_FN (BUILT_IN_FFS
):
6740 target
= expand_builtin_unop (target_mode
, exp
, target
,
6741 subtarget
, ffs_optab
);
6746 CASE_INT_FN (BUILT_IN_CLZ
):
6747 target
= expand_builtin_unop (target_mode
, exp
, target
,
6748 subtarget
, clz_optab
);
6753 CASE_INT_FN (BUILT_IN_CTZ
):
6754 target
= expand_builtin_unop (target_mode
, exp
, target
,
6755 subtarget
, ctz_optab
);
6760 CASE_INT_FN (BUILT_IN_CLRSB
):
6761 target
= expand_builtin_unop (target_mode
, exp
, target
,
6762 subtarget
, clrsb_optab
);
6767 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6768 target
= expand_builtin_unop (target_mode
, exp
, target
,
6769 subtarget
, popcount_optab
);
6774 CASE_INT_FN (BUILT_IN_PARITY
):
6775 target
= expand_builtin_unop (target_mode
, exp
, target
,
6776 subtarget
, parity_optab
);
6781 case BUILT_IN_STRLEN
:
6782 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6787 case BUILT_IN_STRCAT
:
6788 target
= expand_builtin_strcat (exp
, target
);
6793 case BUILT_IN_STRCPY
:
6794 target
= expand_builtin_strcpy (exp
, target
);
6799 case BUILT_IN_STRNCAT
:
6800 target
= expand_builtin_strncat (exp
, target
);
6805 case BUILT_IN_STRNCPY
:
6806 target
= expand_builtin_strncpy (exp
, target
);
6811 case BUILT_IN_STPCPY
:
6812 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6817 case BUILT_IN_STPNCPY
:
6818 target
= expand_builtin_stpncpy (exp
, target
);
6823 case BUILT_IN_MEMCHR
:
6824 target
= expand_builtin_memchr (exp
, target
);
6829 case BUILT_IN_MEMCPY
:
6830 target
= expand_builtin_memcpy (exp
, target
);
6835 case BUILT_IN_MEMMOVE
:
6836 target
= expand_builtin_memmove (exp
, target
);
6841 case BUILT_IN_MEMPCPY
:
6842 target
= expand_builtin_mempcpy (exp
, target
);
6847 case BUILT_IN_MEMSET
:
6848 target
= expand_builtin_memset (exp
, target
, mode
);
6853 case BUILT_IN_BZERO
:
6854 target
= expand_builtin_bzero (exp
);
6859 case BUILT_IN_STRCMP
:
6860 target
= expand_builtin_strcmp (exp
, target
);
6865 case BUILT_IN_STRNCMP
:
6866 target
= expand_builtin_strncmp (exp
, target
, mode
);
6872 case BUILT_IN_MEMCMP
:
6873 case BUILT_IN_MEMCMP_EQ
:
6874 target
= expand_builtin_memcmp (exp
, target
, fcode
== BUILT_IN_MEMCMP_EQ
);
6877 if (fcode
== BUILT_IN_MEMCMP_EQ
)
6879 tree newdecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
6880 TREE_OPERAND (exp
, 1) = build_fold_addr_expr (newdecl
);
6884 case BUILT_IN_SETJMP
:
6885 /* This should have been lowered to the builtins below. */
6888 case BUILT_IN_SETJMP_SETUP
:
6889 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6890 and the receiver label. */
6891 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6893 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6894 VOIDmode
, EXPAND_NORMAL
);
6895 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6896 rtx_insn
*label_r
= label_rtx (label
);
6898 /* This is copied from the handling of non-local gotos. */
6899 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6900 nonlocal_goto_handler_labels
6901 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
6902 nonlocal_goto_handler_labels
);
6903 /* ??? Do not let expand_label treat us as such since we would
6904 not want to be both on the list of non-local labels and on
6905 the list of forced labels. */
6906 FORCED_LABEL (label
) = 0;
6911 case BUILT_IN_SETJMP_RECEIVER
:
6912 /* __builtin_setjmp_receiver is passed the receiver label. */
6913 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6915 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6916 rtx_insn
*label_r
= label_rtx (label
);
6918 expand_builtin_setjmp_receiver (label_r
);
6923 /* __builtin_longjmp is passed a pointer to an array of five words.
6924 It's similar to the C library longjmp function but works with
6925 __builtin_setjmp above. */
6926 case BUILT_IN_LONGJMP
:
6927 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6929 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6930 VOIDmode
, EXPAND_NORMAL
);
6931 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6933 if (value
!= const1_rtx
)
6935 error ("%<__builtin_longjmp%> second argument must be 1");
6939 expand_builtin_longjmp (buf_addr
, value
);
6944 case BUILT_IN_NONLOCAL_GOTO
:
6945 target
= expand_builtin_nonlocal_goto (exp
);
6950 /* This updates the setjmp buffer that is its argument with the value
6951 of the current stack pointer. */
6952 case BUILT_IN_UPDATE_SETJMP_BUF
:
6953 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6956 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6958 expand_builtin_update_setjmp_buf (buf_addr
);
6964 expand_builtin_trap ();
6967 case BUILT_IN_UNREACHABLE
:
6968 expand_builtin_unreachable ();
6971 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6972 case BUILT_IN_SIGNBITD32
:
6973 case BUILT_IN_SIGNBITD64
:
6974 case BUILT_IN_SIGNBITD128
:
6975 target
= expand_builtin_signbit (exp
, target
);
6980 /* Various hooks for the DWARF 2 __throw routine. */
6981 case BUILT_IN_UNWIND_INIT
:
6982 expand_builtin_unwind_init ();
6984 case BUILT_IN_DWARF_CFA
:
6985 return virtual_cfa_rtx
;
6986 #ifdef DWARF2_UNWIND_INFO
6987 case BUILT_IN_DWARF_SP_COLUMN
:
6988 return expand_builtin_dwarf_sp_column ();
6989 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6990 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6993 case BUILT_IN_FROB_RETURN_ADDR
:
6994 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6995 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6996 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6997 case BUILT_IN_EH_RETURN
:
6998 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6999 CALL_EXPR_ARG (exp
, 1));
7001 case BUILT_IN_EH_RETURN_DATA_REGNO
:
7002 return expand_builtin_eh_return_data_regno (exp
);
7003 case BUILT_IN_EXTEND_POINTER
:
7004 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
7005 case BUILT_IN_EH_POINTER
:
7006 return expand_builtin_eh_pointer (exp
);
7007 case BUILT_IN_EH_FILTER
:
7008 return expand_builtin_eh_filter (exp
);
7009 case BUILT_IN_EH_COPY_VALUES
:
7010 return expand_builtin_eh_copy_values (exp
);
7012 case BUILT_IN_VA_START
:
7013 return expand_builtin_va_start (exp
);
7014 case BUILT_IN_VA_END
:
7015 return expand_builtin_va_end (exp
);
7016 case BUILT_IN_VA_COPY
:
7017 return expand_builtin_va_copy (exp
);
7018 case BUILT_IN_EXPECT
:
7019 return expand_builtin_expect (exp
, target
);
7020 case BUILT_IN_ASSUME_ALIGNED
:
7021 return expand_builtin_assume_aligned (exp
, target
);
7022 case BUILT_IN_PREFETCH
:
7023 expand_builtin_prefetch (exp
);
7026 case BUILT_IN_INIT_TRAMPOLINE
:
7027 return expand_builtin_init_trampoline (exp
, true);
7028 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
7029 return expand_builtin_init_trampoline (exp
, false);
7030 case BUILT_IN_ADJUST_TRAMPOLINE
:
7031 return expand_builtin_adjust_trampoline (exp
);
7033 case BUILT_IN_INIT_DESCRIPTOR
:
7034 return expand_builtin_init_descriptor (exp
);
7035 case BUILT_IN_ADJUST_DESCRIPTOR
:
7036 return expand_builtin_adjust_descriptor (exp
);
7039 case BUILT_IN_EXECL
:
7040 case BUILT_IN_EXECV
:
7041 case BUILT_IN_EXECLP
:
7042 case BUILT_IN_EXECLE
:
7043 case BUILT_IN_EXECVP
:
7044 case BUILT_IN_EXECVE
:
7045 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
7050 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
7051 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
7052 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
7053 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
7054 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
7055 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
7056 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
7061 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
7062 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
7063 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
7064 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
7065 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
7066 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
7067 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
7072 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
7073 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
7074 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
7075 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
7076 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
7077 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
7078 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
7083 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
7084 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
7085 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
7086 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
7087 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
7088 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
7089 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
7094 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
7095 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
7096 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
7097 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
7098 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
7099 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
7100 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
7105 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
7106 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
7107 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
7108 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
7109 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
7110 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
7111 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
7116 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
7117 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
7118 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
7119 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
7120 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
7121 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
7122 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
7127 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
7128 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
7129 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
7130 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
7131 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
7132 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
7133 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
7138 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
7139 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
7140 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
7141 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
7142 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
7143 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
7144 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
7149 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
7150 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
7151 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
7152 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
7153 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
7154 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
7155 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
7160 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
7161 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
7162 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
7163 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
7164 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
7165 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
7166 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
7171 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
7172 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
7173 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
7174 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
7175 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
7176 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
7177 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
7182 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
7183 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
7184 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
7185 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
7186 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
7187 if (mode
== VOIDmode
)
7188 mode
= TYPE_MODE (boolean_type_node
);
7189 if (!target
|| !register_operand (target
, mode
))
7190 target
= gen_reg_rtx (mode
);
7192 mode
= get_builtin_sync_mode
7193 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
7194 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
7199 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
7200 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
7201 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
7202 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
7203 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
7204 mode
= get_builtin_sync_mode
7205 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
7206 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
7211 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
7212 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
7213 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
7214 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
7215 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
7216 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
7217 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
7222 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
7223 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
7224 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
7225 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
7226 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
7227 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
7228 expand_builtin_sync_lock_release (mode
, exp
);
7231 case BUILT_IN_SYNC_SYNCHRONIZE
:
7232 expand_builtin_sync_synchronize ();
7235 case BUILT_IN_ATOMIC_EXCHANGE_1
:
7236 case BUILT_IN_ATOMIC_EXCHANGE_2
:
7237 case BUILT_IN_ATOMIC_EXCHANGE_4
:
7238 case BUILT_IN_ATOMIC_EXCHANGE_8
:
7239 case BUILT_IN_ATOMIC_EXCHANGE_16
:
7240 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
7241 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
7246 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
7247 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
7248 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
7249 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
7250 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
7252 unsigned int nargs
, z
;
7253 vec
<tree
, va_gc
> *vec
;
7256 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
7257 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
7261 /* If this is turned into an external library call, the weak parameter
7262 must be dropped to match the expected parameter list. */
7263 nargs
= call_expr_nargs (exp
);
7264 vec_alloc (vec
, nargs
- 1);
7265 for (z
= 0; z
< 3; z
++)
7266 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
7267 /* Skip the boolean weak parameter. */
7268 for (z
= 4; z
< 6; z
++)
7269 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
7270 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
7274 case BUILT_IN_ATOMIC_LOAD_1
:
7275 case BUILT_IN_ATOMIC_LOAD_2
:
7276 case BUILT_IN_ATOMIC_LOAD_4
:
7277 case BUILT_IN_ATOMIC_LOAD_8
:
7278 case BUILT_IN_ATOMIC_LOAD_16
:
7279 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
7280 target
= expand_builtin_atomic_load (mode
, exp
, target
);
7285 case BUILT_IN_ATOMIC_STORE_1
:
7286 case BUILT_IN_ATOMIC_STORE_2
:
7287 case BUILT_IN_ATOMIC_STORE_4
:
7288 case BUILT_IN_ATOMIC_STORE_8
:
7289 case BUILT_IN_ATOMIC_STORE_16
:
7290 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
7291 target
= expand_builtin_atomic_store (mode
, exp
);
7296 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
7297 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
7298 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
7299 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
7300 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
7302 enum built_in_function lib
;
7303 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
7304 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
7305 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
7306 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
7312 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
7313 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
7314 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
7315 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
7316 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
7318 enum built_in_function lib
;
7319 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
7320 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
7321 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
7322 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
7328 case BUILT_IN_ATOMIC_AND_FETCH_1
:
7329 case BUILT_IN_ATOMIC_AND_FETCH_2
:
7330 case BUILT_IN_ATOMIC_AND_FETCH_4
:
7331 case BUILT_IN_ATOMIC_AND_FETCH_8
:
7332 case BUILT_IN_ATOMIC_AND_FETCH_16
:
7334 enum built_in_function lib
;
7335 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
7336 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
7337 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
7338 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
7344 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
7345 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
7346 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
7347 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
7348 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
7350 enum built_in_function lib
;
7351 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
7352 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
7353 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
7354 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
7360 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
7361 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
7362 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
7363 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
7364 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
7366 enum built_in_function lib
;
7367 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
7368 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
7369 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
7370 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
7376 case BUILT_IN_ATOMIC_OR_FETCH_1
:
7377 case BUILT_IN_ATOMIC_OR_FETCH_2
:
7378 case BUILT_IN_ATOMIC_OR_FETCH_4
:
7379 case BUILT_IN_ATOMIC_OR_FETCH_8
:
7380 case BUILT_IN_ATOMIC_OR_FETCH_16
:
7382 enum built_in_function lib
;
7383 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
7384 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
7385 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
7386 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
7392 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
7393 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
7394 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
7395 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
7396 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
7397 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
7398 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
7399 ignore
, BUILT_IN_NONE
);
7404 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
7405 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
7406 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
7407 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
7408 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
7409 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
7410 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
7411 ignore
, BUILT_IN_NONE
);
7416 case BUILT_IN_ATOMIC_FETCH_AND_1
:
7417 case BUILT_IN_ATOMIC_FETCH_AND_2
:
7418 case BUILT_IN_ATOMIC_FETCH_AND_4
:
7419 case BUILT_IN_ATOMIC_FETCH_AND_8
:
7420 case BUILT_IN_ATOMIC_FETCH_AND_16
:
7421 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
7422 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
7423 ignore
, BUILT_IN_NONE
);
7428 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
7429 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
7430 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
7431 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
7432 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
7433 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
7434 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
7435 ignore
, BUILT_IN_NONE
);
7440 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
7441 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
7442 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
7443 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
7444 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
7445 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
7446 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
7447 ignore
, BUILT_IN_NONE
);
7452 case BUILT_IN_ATOMIC_FETCH_OR_1
:
7453 case BUILT_IN_ATOMIC_FETCH_OR_2
:
7454 case BUILT_IN_ATOMIC_FETCH_OR_4
:
7455 case BUILT_IN_ATOMIC_FETCH_OR_8
:
7456 case BUILT_IN_ATOMIC_FETCH_OR_16
:
7457 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
7458 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
7459 ignore
, BUILT_IN_NONE
);
7464 case BUILT_IN_ATOMIC_TEST_AND_SET
:
7465 return expand_builtin_atomic_test_and_set (exp
, target
);
7467 case BUILT_IN_ATOMIC_CLEAR
:
7468 return expand_builtin_atomic_clear (exp
);
7470 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
7471 return expand_builtin_atomic_always_lock_free (exp
);
7473 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
7474 target
= expand_builtin_atomic_is_lock_free (exp
);
7479 case BUILT_IN_ATOMIC_THREAD_FENCE
:
7480 expand_builtin_atomic_thread_fence (exp
);
7483 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
7484 expand_builtin_atomic_signal_fence (exp
);
7487 case BUILT_IN_OBJECT_SIZE
:
7488 return expand_builtin_object_size (exp
);
7490 case BUILT_IN_MEMCPY_CHK
:
7491 case BUILT_IN_MEMPCPY_CHK
:
7492 case BUILT_IN_MEMMOVE_CHK
:
7493 case BUILT_IN_MEMSET_CHK
:
7494 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
7499 case BUILT_IN_STRCPY_CHK
:
7500 case BUILT_IN_STPCPY_CHK
:
7501 case BUILT_IN_STRNCPY_CHK
:
7502 case BUILT_IN_STPNCPY_CHK
:
7503 case BUILT_IN_STRCAT_CHK
:
7504 case BUILT_IN_STRNCAT_CHK
:
7505 case BUILT_IN_SNPRINTF_CHK
:
7506 case BUILT_IN_VSNPRINTF_CHK
:
7507 maybe_emit_chk_warning (exp
, fcode
);
7510 case BUILT_IN_SPRINTF_CHK
:
7511 case BUILT_IN_VSPRINTF_CHK
:
7512 maybe_emit_sprintf_chk_warning (exp
, fcode
);
7516 if (warn_free_nonheap_object
)
7517 maybe_emit_free_warning (exp
);
7520 case BUILT_IN_THREAD_POINTER
:
7521 return expand_builtin_thread_pointer (exp
, target
);
7523 case BUILT_IN_SET_THREAD_POINTER
:
7524 expand_builtin_set_thread_pointer (exp
);
7527 case BUILT_IN_CILK_DETACH
:
7528 expand_builtin_cilk_detach (exp
);
7531 case BUILT_IN_CILK_POP_FRAME
:
7532 expand_builtin_cilk_pop_frame (exp
);
7535 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
7536 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
7537 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
7538 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
7539 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
7540 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
7541 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
7542 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
7543 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
7544 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
7545 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
7546 /* We allow user CHKP builtins if Pointer Bounds
7548 if (!chkp_function_instrumented_p (current_function_decl
))
7550 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
7551 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7552 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
7553 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
7554 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
7555 return expand_normal (CALL_EXPR_ARG (exp
, 0));
7556 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
7557 return expand_normal (size_zero_node
);
7558 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
7559 return expand_normal (size_int (-1));
7565 case BUILT_IN_CHKP_BNDMK
:
7566 case BUILT_IN_CHKP_BNDSTX
:
7567 case BUILT_IN_CHKP_BNDCL
:
7568 case BUILT_IN_CHKP_BNDCU
:
7569 case BUILT_IN_CHKP_BNDLDX
:
7570 case BUILT_IN_CHKP_BNDRET
:
7571 case BUILT_IN_CHKP_INTERSECT
:
7572 case BUILT_IN_CHKP_NARROW
:
7573 case BUILT_IN_CHKP_EXTRACT_LOWER
:
7574 case BUILT_IN_CHKP_EXTRACT_UPPER
:
7575 /* Software implementation of Pointer Bounds Checker is NYI.
7576 Target support is required. */
7577 error ("Your target platform does not support -fcheck-pointer-bounds");
7580 case BUILT_IN_ACC_ON_DEVICE
:
7581 /* Do library call, if we failed to expand the builtin when
7585 default: /* just do library call, if unknown builtin */
7589 /* The switch statement above can drop through to cause the function
7590 to be called normally. */
7591 return expand_call (exp
, target
, ignore
);
7594 /* Similar to expand_builtin but is used for instrumented calls. */
7597 expand_builtin_with_bounds (tree exp
, rtx target
,
7598 rtx subtarget ATTRIBUTE_UNUSED
,
7599 machine_mode mode
, int ignore
)
7601 tree fndecl
= get_callee_fndecl (exp
);
7602 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7604 gcc_assert (CALL_WITH_BOUNDS_P (exp
));
7606 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7607 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
7609 gcc_assert (fcode
> BEGIN_CHKP_BUILTINS
7610 && fcode
< END_CHKP_BUILTINS
);
7614 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
:
7615 target
= expand_builtin_memcpy_with_bounds (exp
, target
);
7620 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
:
7621 target
= expand_builtin_mempcpy_with_bounds (exp
, target
);
7626 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
:
7627 target
= expand_builtin_memset_with_bounds (exp
, target
, mode
);
7636 /* The switch statement above can drop through to cause the function
7637 to be called normally. */
7638 return expand_call (exp
, target
, ignore
);
7641 /* Determine whether a tree node represents a call to a built-in
7642 function. If the tree T is a call to a built-in function with
7643 the right number of arguments of the appropriate types, return
7644 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7645 Otherwise the return value is END_BUILTINS. */
7647 enum built_in_function
7648 builtin_mathfn_code (const_tree t
)
7650 const_tree fndecl
, arg
, parmlist
;
7651 const_tree argtype
, parmtype
;
7652 const_call_expr_arg_iterator iter
;
7654 if (TREE_CODE (t
) != CALL_EXPR
7655 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
7656 return END_BUILTINS
;
7658 fndecl
= get_callee_fndecl (t
);
7659 if (fndecl
== NULL_TREE
7660 || TREE_CODE (fndecl
) != FUNCTION_DECL
7661 || ! DECL_BUILT_IN (fndecl
)
7662 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
7663 return END_BUILTINS
;
7665 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
7666 init_const_call_expr_arg_iterator (t
, &iter
);
7667 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
7669 /* If a function doesn't take a variable number of arguments,
7670 the last element in the list will have type `void'. */
7671 parmtype
= TREE_VALUE (parmlist
);
7672 if (VOID_TYPE_P (parmtype
))
7674 if (more_const_call_expr_args_p (&iter
))
7675 return END_BUILTINS
;
7676 return DECL_FUNCTION_CODE (fndecl
);
7679 if (! more_const_call_expr_args_p (&iter
))
7680 return END_BUILTINS
;
7682 arg
= next_const_call_expr_arg (&iter
);
7683 argtype
= TREE_TYPE (arg
);
7685 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7687 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7688 return END_BUILTINS
;
7690 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7692 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7693 return END_BUILTINS
;
7695 else if (POINTER_TYPE_P (parmtype
))
7697 if (! POINTER_TYPE_P (argtype
))
7698 return END_BUILTINS
;
7700 else if (INTEGRAL_TYPE_P (parmtype
))
7702 if (! INTEGRAL_TYPE_P (argtype
))
7703 return END_BUILTINS
;
7706 return END_BUILTINS
;
7709 /* Variable-length argument list. */
7710 return DECL_FUNCTION_CODE (fndecl
);
7713 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7714 evaluate to a constant. */
7717 fold_builtin_constant_p (tree arg
)
7719 /* We return 1 for a numeric type that's known to be a constant
7720 value at compile-time or for an aggregate type that's a
7721 literal constant. */
7724 /* If we know this is a constant, emit the constant of one. */
7725 if (CONSTANT_CLASS_P (arg
)
7726 || (TREE_CODE (arg
) == CONSTRUCTOR
7727 && TREE_CONSTANT (arg
)))
7728 return integer_one_node
;
7729 if (TREE_CODE (arg
) == ADDR_EXPR
)
7731 tree op
= TREE_OPERAND (arg
, 0);
7732 if (TREE_CODE (op
) == STRING_CST
7733 || (TREE_CODE (op
) == ARRAY_REF
7734 && integer_zerop (TREE_OPERAND (op
, 1))
7735 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7736 return integer_one_node
;
7739 /* If this expression has side effects, show we don't know it to be a
7740 constant. Likewise if it's a pointer or aggregate type since in
7741 those case we only want literals, since those are only optimized
7742 when generating RTL, not later.
7743 And finally, if we are compiling an initializer, not code, we
7744 need to return a definite result now; there's not going to be any
7745 more optimization done. */
7746 if (TREE_SIDE_EFFECTS (arg
)
7747 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7748 || POINTER_TYPE_P (TREE_TYPE (arg
))
7750 || folding_initializer
7751 || force_folding_builtin_constant_p
)
7752 return integer_zero_node
;
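/* Illustrative sketch (user-level code, not part of GCC itself): how the
   folder above behaves.  A literal folds to 1 immediately; a value that is
   not provably constant only folds to 0 once no further optimization can
   change the answer (or right away when force_folding_builtin_constant_p or
   folding_initializer is set).  */
static inline int
example_constant_p (int x)
{
  int a = __builtin_constant_p (42);	/* folds to 1 */
  int b = __builtin_constant_p (x);	/* normally ends up as 0 */
  return a + b;
}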
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
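
/* For example, a condition written as
     if (__builtin_expect (a && b, 1)) ...
   is distributed by the code above into roughly
     if (__builtin_expect (a, 1) && __builtin_expect (b, 1)) ...
   so that each short-circuit operand carries its own prediction.  */
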
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
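
/* For example, __builtin_classify_type (1.5) folds to the integer constant
   for real_type_class, and a call with no argument folds to no_type_class.  */
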
/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
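
/* For example, sincos (x, &s, &c) is rewritten as roughly
     __complex__ double t = cexpi (x); s = __imag__ t; c = __real__ t;
   where the two assignments are the MODIFY_EXPRs chained by the
   COMPOUND_EXPR built above.  */
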
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If the LEN parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
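
/* For example, memcmp (p, q, 0) folds to 0 (with P and Q still evaluated),
   and memcmp (p, q, 1) folds to
     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q.  */
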
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
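
/* The single comparison works because the subtraction is done in unsigned
   arithmetic: for c below '0' the value (unsigned) c - '0' wraps around to a
   large number, so (unsigned) c - '0' <= 9 holds exactly when c is one of
   the ten digit characters.  */
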
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  /* ??? Only expand to FMA_EXPR if it's directly supported.  */
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE)
      && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
    return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
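
/* For example, carg (z) for a complex double Z becomes
     atan2 (__imag__ z, __real__ z)
   using the atan2 builtin looked up above.  */
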
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
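
/* For example, with the constant argument 8.0 the code above produces
   *arg1 = 4 and a result of 0.5, since 8.0 == 0.5 * 2**4.  */
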
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	case rvc_normal:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
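
/* For example, with the constant argument -2.5 the code above produces
   *arg1 = -2.0 and a result of -0.5; for -2.0 the result is -0.0, which is
   why the sign is copied onto a zero fractional part.  */
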
8225 /* Given a location LOC, an interclass builtin function decl FNDECL
8226 and its single argument ARG, return an folded expression computing
8227 the same, or NULL_TREE if we either couldn't or didn't want to fold
8228 (the latter happen if there's an RTL instruction available). */
8231 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
8235 if (!validate_arg (arg
, REAL_TYPE
))
8238 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
8241 mode
= TYPE_MODE (TREE_TYPE (arg
));
8243 bool is_ibm_extended
= MODE_COMPOSITE_P (mode
);
8245 /* If there is no optab, try generic code. */
8246 switch (DECL_FUNCTION_CODE (fndecl
))
8250 CASE_FLT_FN (BUILT_IN_ISINF
):
8252 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8253 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
8254 tree type
= TREE_TYPE (arg
);
8258 if (is_ibm_extended
)
8260 /* NaN and Inf are encoded in the high-order double value
8261 only. The low-order value is not significant. */
8262 type
= double_type_node
;
8264 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8266 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8267 real_from_string (&r
, buf
);
8268 result
= build_call_expr (isgr_fn
, 2,
8269 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
8270 build_real (type
, r
));
8273 CASE_FLT_FN (BUILT_IN_FINITE
):
8274 case BUILT_IN_ISFINITE
:
8276 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8277 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
8278 tree type
= TREE_TYPE (arg
);
8282 if (is_ibm_extended
)
8284 /* NaN and Inf are encoded in the high-order double value
8285 only. The low-order value is not significant. */
8286 type
= double_type_node
;
8288 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8290 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8291 real_from_string (&r
, buf
);
8292 result
= build_call_expr (isle_fn
, 2,
8293 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
8294 build_real (type
, r
));
8295 /*result = fold_build2_loc (loc, UNGT_EXPR,
8296 TREE_TYPE (TREE_TYPE (fndecl)),
8297 fold_build1_loc (loc, ABS_EXPR, type, arg),
8298 build_real (type, r));
8299 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8300 TREE_TYPE (TREE_TYPE (fndecl)),
8304 case BUILT_IN_ISNORMAL
:
8306 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8307 islessequal(fabs(x),DBL_MAX). */
8308 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
8309 tree type
= TREE_TYPE (arg
);
8310 tree orig_arg
, max_exp
, min_exp
;
8311 machine_mode orig_mode
= mode
;
8312 REAL_VALUE_TYPE rmax
, rmin
;
8315 orig_arg
= arg
= builtin_save_expr (arg
);
8316 if (is_ibm_extended
)
8318 /* Use double to test the normal range of IBM extended
8319 precision. Emin for IBM extended precision is
8320 different to emin for IEEE double, being 53 higher
8321 since the low double exponent is at least 53 lower
8322 than the high double exponent. */
8323 type
= double_type_node
;
8325 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
8327 arg
= fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
8329 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
8330 real_from_string (&rmax
, buf
);
8331 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (orig_mode
)->emin
- 1);
8332 real_from_string (&rmin
, buf
);
8333 max_exp
= build_real (type
, rmax
);
8334 min_exp
= build_real (type
, rmin
);
8336 max_exp
= build_call_expr (isle_fn
, 2, arg
, max_exp
);
8337 if (is_ibm_extended
)
8339 /* Testing the high end of the range is done just using
8340 the high double, using the same test as isfinite().
8341 For the subnormal end of the range we first test the
8342 high double, then if its magnitude is equal to the
8343 limit of 0x1p-969, we test whether the low double is
8344 non-zero and opposite sign to the high double. */
8345 tree
const islt_fn
= builtin_decl_explicit (BUILT_IN_ISLESS
);
8346 tree
const isgt_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
8347 tree gt_min
= build_call_expr (isgt_fn
, 2, arg
, min_exp
);
8348 tree eq_min
= fold_build2 (EQ_EXPR
, integer_type_node
,
8350 tree as_complex
= build1 (VIEW_CONVERT_EXPR
,
8351 complex_double_type_node
, orig_arg
);
8352 tree hi_dbl
= build1 (REALPART_EXPR
, type
, as_complex
);
8353 tree lo_dbl
= build1 (IMAGPART_EXPR
, type
, as_complex
);
8354 tree zero
= build_real (type
, dconst0
);
8355 tree hilt
= build_call_expr (islt_fn
, 2, hi_dbl
, zero
);
8356 tree lolt
= build_call_expr (islt_fn
, 2, lo_dbl
, zero
);
8357 tree logt
= build_call_expr (isgt_fn
, 2, lo_dbl
, zero
);
8358 tree ok_lo
= fold_build1 (TRUTH_NOT_EXPR
, integer_type_node
,
8359 fold_build3 (COND_EXPR
,
8362 eq_min
= fold_build2 (TRUTH_ANDIF_EXPR
, integer_type_node
,
8364 min_exp
= fold_build2 (TRUTH_ORIF_EXPR
, integer_type_node
,
8370 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
8371 min_exp
= build_call_expr (isge_fn
, 2, arg
, min_exp
);
8373 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
,
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
8463 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8464 This builtin will generate code to return the appropriate floating
8465 point classification depending on the value of the floating point
8466 number passed in. The possible return values must be supplied as
8467 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8468 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8469 one floating point argument which is "type generic". */
8472 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
8474 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
8475 arg
, type
, res
, tmp
;
8480 /* Verify the required arguments in the original call. */
8482 || !validate_arg (args
[0], INTEGER_TYPE
)
8483 || !validate_arg (args
[1], INTEGER_TYPE
)
8484 || !validate_arg (args
[2], INTEGER_TYPE
)
8485 || !validate_arg (args
[3], INTEGER_TYPE
)
8486 || !validate_arg (args
[4], INTEGER_TYPE
)
8487 || !validate_arg (args
[5], REAL_TYPE
))
8491 fp_infinite
= args
[1];
8492 fp_normal
= args
[2];
8493 fp_subnormal
= args
[3];
8496 type
= TREE_TYPE (arg
);
8497 mode
= TYPE_MODE (type
);
8498 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
8502 (fabs(x) == Inf ? FP_INFINITE :
8503 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8504 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8506 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
8507 build_real (type
, dconst0
));
8508 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
8509 tmp
, fp_zero
, fp_subnormal
);
8511 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
8512 real_from_string (&r
, buf
);
8513 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
8514 arg
, build_real (type
, r
));
8515 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
8517 if (HONOR_INFINITIES (mode
))
8520 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
8521 build_real (type
, r
));
8522 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
8526 if (HONOR_NANS (mode
))
8528 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
8529 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
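
/* For example, when NaNs are honored isgreater (x, y) is folded to the
   negation of UNLE_EXPR (x, y), i.e. "ordered and greater", so the result is
   false whenever either operand is a NaN; without NaNs the plain LE_EXPR is
   negated instead.  */
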
8584 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8585 arithmetics if it can never overflow, or into internal functions that
8586 return both result of arithmetics and overflowed boolean flag in
8587 a complex integer result, or some other check for overflow.
8588 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8589 checking part of that. */
8592 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
8593 tree arg0
, tree arg1
, tree arg2
)
8595 enum internal_fn ifn
= IFN_LAST
;
8596 /* The code of the expression corresponding to the type-generic
8597 built-in, or ERROR_MARK for the type-specific ones. */
8598 enum tree_code opcode
= ERROR_MARK
;
8599 bool ovf_only
= false;
8603 case BUILT_IN_ADD_OVERFLOW_P
:
8606 case BUILT_IN_ADD_OVERFLOW
:
8609 case BUILT_IN_SADD_OVERFLOW
:
8610 case BUILT_IN_SADDL_OVERFLOW
:
8611 case BUILT_IN_SADDLL_OVERFLOW
:
8612 case BUILT_IN_UADD_OVERFLOW
:
8613 case BUILT_IN_UADDL_OVERFLOW
:
8614 case BUILT_IN_UADDLL_OVERFLOW
:
8615 ifn
= IFN_ADD_OVERFLOW
;
8617 case BUILT_IN_SUB_OVERFLOW_P
:
8620 case BUILT_IN_SUB_OVERFLOW
:
8621 opcode
= MINUS_EXPR
;
8623 case BUILT_IN_SSUB_OVERFLOW
:
8624 case BUILT_IN_SSUBL_OVERFLOW
:
8625 case BUILT_IN_SSUBLL_OVERFLOW
:
8626 case BUILT_IN_USUB_OVERFLOW
:
8627 case BUILT_IN_USUBL_OVERFLOW
:
8628 case BUILT_IN_USUBLL_OVERFLOW
:
8629 ifn
= IFN_SUB_OVERFLOW
;
8631 case BUILT_IN_MUL_OVERFLOW_P
:
8634 case BUILT_IN_MUL_OVERFLOW
:
8637 case BUILT_IN_SMUL_OVERFLOW
:
8638 case BUILT_IN_SMULL_OVERFLOW
:
8639 case BUILT_IN_SMULLL_OVERFLOW
:
8640 case BUILT_IN_UMUL_OVERFLOW
:
8641 case BUILT_IN_UMULL_OVERFLOW
:
8642 case BUILT_IN_UMULLL_OVERFLOW
:
8643 ifn
= IFN_MUL_OVERFLOW
;
8649 /* For the "generic" overloads, the first two arguments can have different
8650 types and the last argument determines the target type to use to check
8651 for overflow. The arguments of the other overloads all have the same
8653 tree type
= ovf_only
? TREE_TYPE (arg2
) : TREE_TYPE (TREE_TYPE (arg2
));
8655 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8656 arguments are constant, attempt to fold the built-in call into a constant
8657 expression indicating whether or not it detected an overflow. */
8659 && TREE_CODE (arg0
) == INTEGER_CST
8660 && TREE_CODE (arg1
) == INTEGER_CST
)
8661 /* Perform the computation in the target type and check for overflow. */
8662 return omit_one_operand_loc (loc
, boolean_type_node
,
8663 arith_overflowed_p (opcode
, type
, arg0
, arg1
)
8664 ? boolean_true_node
: boolean_false_node
,
8667 tree ctype
= build_complex_type (type
);
8668 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
8670 tree tgt
= save_expr (call
);
8671 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
8672 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
8673 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
8676 return omit_one_operand_loc (loc
, boolean_type_node
, ovfres
, arg2
);
8678 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
8680 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
8681 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    return build_string_literal (strlen (fname) + 1, fname);

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (strlen (name) + 1, name);
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}

/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
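
/* For example, __builtin_LINE () appearing on line 42 folds to the integer
   constant 42, and __builtin_FILE () folds to the enclosing file name as a
   string literal.  */
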
8755 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8756 This function returns NULL_TREE if no simplification was possible. */
8759 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
8761 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8762 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8764 if (TREE_CODE (arg0
) == ERROR_MARK
)
8767 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
))
8772 case BUILT_IN_CONSTANT_P
:
8774 tree val
= fold_builtin_constant_p (arg0
);
8776 /* Gimplification will pull the CALL_EXPR for the builtin out of
8777 an if condition. When not optimizing, we'll not CSE it back.
8778 To avoid link error types of regressions, return false now. */
8779 if (!val
&& !optimize
)
8780 val
= integer_zero_node
;
8785 case BUILT_IN_CLASSIFY_TYPE
:
8786 return fold_builtin_classify_type (arg0
);
8788 case BUILT_IN_STRLEN
:
8789 return fold_builtin_strlen (loc
, type
, arg0
);
8791 CASE_FLT_FN (BUILT_IN_FABS
):
8792 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
8793 case BUILT_IN_FABSD32
:
8794 case BUILT_IN_FABSD64
:
8795 case BUILT_IN_FABSD128
:
8796 return fold_builtin_fabs (loc
, arg0
, type
);
8800 case BUILT_IN_LLABS
:
8801 case BUILT_IN_IMAXABS
:
8802 return fold_builtin_abs (loc
, arg0
, type
);
8804 CASE_FLT_FN (BUILT_IN_CONJ
):
8805 if (validate_arg (arg0
, COMPLEX_TYPE
)
8806 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8807 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
8810 CASE_FLT_FN (BUILT_IN_CREAL
):
8811 if (validate_arg (arg0
, COMPLEX_TYPE
)
8812 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8813 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
8816 CASE_FLT_FN (BUILT_IN_CIMAG
):
8817 if (validate_arg (arg0
, COMPLEX_TYPE
)
8818 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8819 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
8822 CASE_FLT_FN (BUILT_IN_CARG
):
8823 return fold_builtin_carg (loc
, arg0
, type
);
8825 case BUILT_IN_ISASCII
:
8826 return fold_builtin_isascii (loc
, arg0
);
8828 case BUILT_IN_TOASCII
:
8829 return fold_builtin_toascii (loc
, arg0
);
8831 case BUILT_IN_ISDIGIT
:
8832 return fold_builtin_isdigit (loc
, arg0
);
8834 CASE_FLT_FN (BUILT_IN_FINITE
):
8835 case BUILT_IN_FINITED32
:
8836 case BUILT_IN_FINITED64
:
8837 case BUILT_IN_FINITED128
:
8838 case BUILT_IN_ISFINITE
:
8840 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
8843 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8846 CASE_FLT_FN (BUILT_IN_ISINF
):
8847 case BUILT_IN_ISINFD32
:
8848 case BUILT_IN_ISINFD64
:
8849 case BUILT_IN_ISINFD128
:
8851 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
8854 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8857 case BUILT_IN_ISNORMAL
:
8858 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8860 case BUILT_IN_ISINF_SIGN
:
8861 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
8863 CASE_FLT_FN (BUILT_IN_ISNAN
):
8864 case BUILT_IN_ISNAND32
:
8865 case BUILT_IN_ISNAND64
:
8866 case BUILT_IN_ISNAND128
:
8867 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
8870 if (integer_zerop (arg0
))
8871 return build_empty_stmt (loc
);
8882 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8883 This function returns NULL_TREE if no simplification was possible. */
8886 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
8888 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8889 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8891 if (TREE_CODE (arg0
) == ERROR_MARK
8892 || TREE_CODE (arg1
) == ERROR_MARK
)
8895 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
, arg1
))
8900 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
8901 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
8902 if (validate_arg (arg0
, REAL_TYPE
)
8903 && validate_arg (arg1
, POINTER_TYPE
))
8904 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
8907 CASE_FLT_FN (BUILT_IN_FREXP
):
8908 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
8910 CASE_FLT_FN (BUILT_IN_MODF
):
8911 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
8913 case BUILT_IN_STRSPN
:
8914 return fold_builtin_strspn (loc
, arg0
, arg1
);
8916 case BUILT_IN_STRCSPN
:
8917 return fold_builtin_strcspn (loc
, arg0
, arg1
);
8919 case BUILT_IN_STRPBRK
:
8920 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
8922 case BUILT_IN_EXPECT
:
8923 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
8925 case BUILT_IN_ISGREATER
:
8926 return fold_builtin_unordered_cmp (loc
, fndecl
,
8927 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
8928 case BUILT_IN_ISGREATEREQUAL
:
8929 return fold_builtin_unordered_cmp (loc
, fndecl
,
8930 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
8931 case BUILT_IN_ISLESS
:
8932 return fold_builtin_unordered_cmp (loc
, fndecl
,
8933 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
8934 case BUILT_IN_ISLESSEQUAL
:
8935 return fold_builtin_unordered_cmp (loc
, fndecl
,
8936 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
8937 case BUILT_IN_ISLESSGREATER
:
8938 return fold_builtin_unordered_cmp (loc
, fndecl
,
8939 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
8940 case BUILT_IN_ISUNORDERED
:
8941 return fold_builtin_unordered_cmp (loc
, fndecl
,
8942 arg0
, arg1
, UNORDERED_EXPR
,
8945 /* We do the folding for va_start in the expander. */
8946 case BUILT_IN_VA_START
:
8949 case BUILT_IN_OBJECT_SIZE
:
8950 return fold_builtin_object_size (arg0
, arg1
);
8952 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
8953 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
8955 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
8956 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
8964 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8966 This function returns NULL_TREE if no simplification was possible. */
8969 fold_builtin_3 (location_t loc
, tree fndecl
,
8970 tree arg0
, tree arg1
, tree arg2
)
8972 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8973 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8975 if (TREE_CODE (arg0
) == ERROR_MARK
8976 || TREE_CODE (arg1
) == ERROR_MARK
8977 || TREE_CODE (arg2
) == ERROR_MARK
)
8980 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
,
8987 CASE_FLT_FN (BUILT_IN_SINCOS
):
8988 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
8990 CASE_FLT_FN (BUILT_IN_FMA
):
8991 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
8993 CASE_FLT_FN (BUILT_IN_REMQUO
):
8994 if (validate_arg (arg0
, REAL_TYPE
)
8995 && validate_arg (arg1
, REAL_TYPE
)
8996 && validate_arg (arg2
, POINTER_TYPE
))
8997 return do_mpfr_remquo (arg0
, arg1
, arg2
);
9000 case BUILT_IN_MEMCMP
:
9001 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
9003 case BUILT_IN_EXPECT
:
9004 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
);
9006 case BUILT_IN_ADD_OVERFLOW
:
9007 case BUILT_IN_SUB_OVERFLOW
:
9008 case BUILT_IN_MUL_OVERFLOW
:
9009 case BUILT_IN_ADD_OVERFLOW_P
:
9010 case BUILT_IN_SUB_OVERFLOW_P
:
9011 case BUILT_IN_MUL_OVERFLOW_P
:
9012 case BUILT_IN_SADD_OVERFLOW
:
9013 case BUILT_IN_SADDL_OVERFLOW
:
9014 case BUILT_IN_SADDLL_OVERFLOW
:
9015 case BUILT_IN_SSUB_OVERFLOW
:
9016 case BUILT_IN_SSUBL_OVERFLOW
:
9017 case BUILT_IN_SSUBLL_OVERFLOW
:
9018 case BUILT_IN_SMUL_OVERFLOW
:
9019 case BUILT_IN_SMULL_OVERFLOW
:
9020 case BUILT_IN_SMULLL_OVERFLOW
:
9021 case BUILT_IN_UADD_OVERFLOW
:
9022 case BUILT_IN_UADDL_OVERFLOW
:
9023 case BUILT_IN_UADDLL_OVERFLOW
:
9024 case BUILT_IN_USUB_OVERFLOW
:
9025 case BUILT_IN_USUBL_OVERFLOW
:
9026 case BUILT_IN_USUBLL_OVERFLOW
:
9027 case BUILT_IN_UMUL_OVERFLOW
:
9028 case BUILT_IN_UMULL_OVERFLOW
:
9029 case BUILT_IN_UMULLL_OVERFLOW
:
9030 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
9038 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9039 arguments. IGNORE is true if the result of the
9040 function call is ignored. This function returns NULL_TREE if no
9041 simplification was possible. */
9044 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool)
9046 tree ret
= NULL_TREE
;
9051 ret
= fold_builtin_0 (loc
, fndecl
);
9054 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
9057 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1]);
9060 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
9063 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
9068 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
9069 SET_EXPR_LOCATION (ret
, loc
);
9070 TREE_NO_WARNING (ret
) = 1;
9076 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9077 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9078 of arguments in ARGS to be omitted. OLDNARGS is the number of
9079 elements in ARGS. */
9082 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
9083 int skip
, tree fndecl
, int n
, va_list newargs
)
9085 int nargs
= oldnargs
- skip
+ n
;
9092 buffer
= XALLOCAVEC (tree
, nargs
);
9093 for (i
= 0; i
< n
; i
++)
9094 buffer
[i
] = va_arg (newargs
, tree
);
9095 for (j
= skip
; j
< oldnargs
; j
++, i
++)
9096 buffer
[i
] = args
[j
];
9099 buffer
= args
+ skip
;
9101 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
9120 /* A wrapper function for builtin folding that prevents warnings for
9121 "statement without effect" and the like, caused by removing the
9122 call node earlier than the warning is generated. */
9125 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
9127 tree ret
= NULL_TREE
;
9128 tree fndecl
= get_callee_fndecl (exp
);
9130 && TREE_CODE (fndecl
) == FUNCTION_DECL
9131 && DECL_BUILT_IN (fndecl
)
9132 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9133 yet. Defer folding until we see all the arguments
9134 (after inlining). */
9135 && !CALL_EXPR_VA_ARG_PACK (exp
))
9137 int nargs
= call_expr_nargs (exp
);
9139 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9140 instead last argument is __builtin_va_arg_pack (). Defer folding
9141 even in that case, until arguments are finalized. */
9142 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
9144 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
9146 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9147 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9148 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9152 if (avoid_folding_inline_builtin (fndecl
))
9155 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9156 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
9157 CALL_EXPR_ARGP (exp
), ignore
);
9160 tree
*args
= CALL_EXPR_ARGP (exp
);
9161 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
9169 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9170 N arguments are passed in the array ARGARRAY. Return a folded
9171 expression or NULL_TREE if no simplification was possible. */
9174 fold_builtin_call_array (location_t loc
, tree
,
9179 if (TREE_CODE (fn
) != ADDR_EXPR
)
9182 tree fndecl
= TREE_OPERAND (fn
, 0);
9183 if (TREE_CODE (fndecl
) == FUNCTION_DECL
9184 && DECL_BUILT_IN (fndecl
))
9186 /* If last argument is __builtin_va_arg_pack (), arguments to this
9187 function are not finalized yet. Defer folding until they are. */
9188 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
9190 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
9192 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9193 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9194 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9197 if (avoid_folding_inline_builtin (fndecl
))
9199 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9200 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
9202 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
9242 /* This function validates the types of a function call argument list
9243 against a specified list of tree_codes. If the last specifier is a 0,
9244 that represents an ellipses, otherwise the last specifier must be a
9247 This is the GIMPLE version of validate_arglist. Eventually we want to
9248 completely convert builtins.c to work from GIMPLEs and the tree based
9249 validate_arglist will then be removed. */
9252 validate_gimple_arglist (const gcall
*call
, ...)
9254 enum tree_code code
;
9260 va_start (ap
, call
);
9265 code
= (enum tree_code
) va_arg (ap
, int);
9269 /* This signifies an ellipses, any further arguments are all ok. */
9273 /* This signifies an endlink, if no arguments remain, return
9274 true, otherwise return false. */
9275 res
= (i
== gimple_call_num_args (call
));
9278 /* If no parameters remain or the parameter's code does not
9279 match the specified code, return false. Otherwise continue
9280 checking any remaining arguments. */
9281 arg
= gimple_call_arg (call
, i
++);
9282 if (!validate_arg (arg
, code
))
9289 /* We need gotos here since we can only have one VA_CLOSE in a
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* If the first argument is "", return NULL_TREE.  */
      const char *p1 = c_getstr (s1);
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      const char *p2 = c_getstr (s2);
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
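
/* For example, strcspn (s, "") folds to a call to strlen (s), and
   strcspn ("", s) folds to 0 while still evaluating S2 for side effects.  */
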
9492 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9493 produced. False otherwise. This is done so that we don't output the error
9494 or warning twice or three times. */
9497 fold_builtin_next_arg (tree exp
, bool va_start_p
)
9499 tree fntype
= TREE_TYPE (current_function_decl
);
9500 int nargs
= call_expr_nargs (exp
);
9502 /* There is good chance the current input_location points inside the
9503 definition of the va_start macro (perhaps on the token for
9504 builtin) in a system header, so warnings will not be emitted.
9505 Use the location in real source code. */
9506 source_location current_location
=
9507 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
9510 if (!stdarg_p (fntype
))
9512 error ("%<va_start%> used in function with fixed args");
9518 if (va_start_p
&& (nargs
!= 2))
9520 error ("wrong number of arguments to function %<va_start%>");
9523 arg
= CALL_EXPR_ARG (exp
, 1);
9525 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9526 when we checked the arguments and if needed issued a warning. */
9531 /* Evidently an out of date version of <stdarg.h>; can't validate
9532 va_start's second argument, but can still work as intended. */
9533 warning_at (current_location
,
9535 "%<__builtin_next_arg%> called without an argument");
9540 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9543 arg
= CALL_EXPR_ARG (exp
, 0);
9546 if (TREE_CODE (arg
) == SSA_NAME
)
9547 arg
= SSA_NAME_VAR (arg
);
9549 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9550 or __builtin_next_arg (0) the first time we see it, after checking
9551 the arguments and if needed issuing a warning. */
9552 if (!integer_zerop (arg
))
9554 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
9556 /* Strip off all nops for the sake of the comparison. This
9557 is not quite the same as STRIP_NOPS. It does more.
9558 We must also strip off INDIRECT_EXPR for C++ reference
9560 while (CONVERT_EXPR_P (arg
)
9561 || TREE_CODE (arg
) == INDIRECT_REF
)
9562 arg
= TREE_OPERAND (arg
, 0);
9563 if (arg
!= last_parm
)
9565 /* FIXME: Sometimes with the tree optimizers we can get the
9566 not the last argument even though the user used the last
9567 argument. We just warn and set the arg to be the last
9568 argument so that we will get wrong-code because of
9570 warning_at (current_location
,
9572 "second parameter of %<va_start%> not last named argument");
9575 /* Undefined by C99 7.15.1.4p4 (va_start):
9576 "If the parameter parmN is declared with the register storage
9577 class, with a function or array type, or with a type that is
9578 not compatible with the type that results after application of
9579 the default argument promotions, the behavior is undefined."
9581 else if (DECL_REGISTER (arg
))
9583 warning_at (current_location
,
9585 "undefined behavior when second parameter of "
9586 "%<va_start%> is declared with %<register%> storage");
9589 /* We want to verify the second parameter just once before the tree
9590 optimizers are run and then avoid keeping it in the tree,
9591 as otherwise we could warn even for correct code like:
9592 void foo (int i, ...)
9593 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9595 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
9597 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
9603 /* Expand a call EXP to __builtin_object_size. */
9606 expand_builtin_object_size (tree exp
)
9609 int object_size_type
;
9610 tree fndecl
= get_callee_fndecl (exp
);
9612 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9614 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9616 expand_builtin_trap ();
9620 ost
= CALL_EXPR_ARG (exp
, 1);
9623 if (TREE_CODE (ost
) != INTEGER_CST
9624 || tree_int_cst_sgn (ost
) < 0
9625 || compare_tree_int (ost
, 3) > 0)
9627 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9629 expand_builtin_trap ();
9633 object_size_type
= tree_to_shwi (ost
);
9635 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
9638 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9639 FCODE is the BUILT_IN_* to use.
9640 Return NULL_RTX if we failed; the caller should emit a normal call,
9641 otherwise try to get the result in TARGET, if convenient (and in
9642 mode MODE if that's convenient). */
9645 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
9646 enum built_in_function fcode
)
9648 tree dest
, src
, len
, size
;
9650 if (!validate_arglist (exp
,
9652 fcode
== BUILT_IN_MEMSET_CHK
9653 ? INTEGER_TYPE
: POINTER_TYPE
,
9654 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9657 dest
= CALL_EXPR_ARG (exp
, 0);
9658 src
= CALL_EXPR_ARG (exp
, 1);
9659 len
= CALL_EXPR_ARG (exp
, 2);
9660 size
= CALL_EXPR_ARG (exp
, 3);
9662 bool sizes_ok
= check_sizes (OPT_Wstringop_overflow_
,
9663 exp
, len
, /*maxlen=*/NULL_TREE
,
9664 /*str=*/NULL_TREE
, size
);
9666 if (!tree_fits_uhwi_p (size
))
9669 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
9671 /* Avoid transforming the checking call to an ordinary one when
9672 an overflow has been detected or when the call couldn't be
9673 validated because the size is not constant. */
9674 if (!sizes_ok
&& !integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
9677 tree fn
= NULL_TREE
;
9678 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9679 mem{cpy,pcpy,move,set} is available. */
9682 case BUILT_IN_MEMCPY_CHK
:
9683 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
9685 case BUILT_IN_MEMPCPY_CHK
:
9686 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
9688 case BUILT_IN_MEMMOVE_CHK
:
9689 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
9691 case BUILT_IN_MEMSET_CHK
:
9692 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
9701 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
9702 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
9703 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
9704 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
9706 else if (fcode
== BUILT_IN_MEMSET_CHK
)
9710 unsigned int dest_align
= get_pointer_alignment (dest
);
9712 /* If DEST is not a pointer type, call the normal function. */
9713 if (dest_align
== 0)
9716 /* If SRC and DEST are the same (and not volatile), do nothing. */
9717 if (operand_equal_p (src
, dest
, 0))
9721 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
9723 /* Evaluate and ignore LEN in case it has side-effects. */
9724 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9725 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
9728 expr
= fold_build_pointer_plus (dest
, len
);
9729 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
9732 /* __memmove_chk special case. */
9733 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
9735 unsigned int src_align
= get_pointer_alignment (src
);
9740 /* If src is categorized for a readonly section we can use
9741 normal __memcpy_chk. */
9742 if (readonly_data_expr (src
))
9744 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
9747 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
9748 dest
, src
, len
, size
);
9749 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
9750 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
9751 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxlen = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
         by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxlen = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxlen = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxlen = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    default:
      gcc_unreachable ();
    }

  if (catstr && maxlen)
    {
      /* Check __strncat_chk.  There is no way to determine the length
         of the string to which the source string is being appended so
         just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  check_sizes (OPT_Wstringop_overflow_, exp,
               /*size=*/NULL_TREE, maxlen, srcstr, objsize);
}
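/* Illustrative sketch (hypothetical user code, not part of GCC): source that
   lets the check above fire at compile time.  The destination size is known
   and smaller than the source string, so expanding __strcpy_chk emits a
   -Wstringop-overflow warning.  Guarded out so it does not affect this
   translation unit.  */
#if 0
void
overflow_example (void)
{
  char buf[4];
  /* OBJSIZE is 4 but "hello" needs 6 bytes including the terminating nul,
     so the warning machinery above diagnoses the overflow.  */
  __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 1));
}
#endif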
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_sizes (OPT_Wstringop_overflow_,
               exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
}
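/* Illustrative sketch (hypothetical user code, not part of GCC): the two
   format shapes the function above can reason about, a format with no '%'
   directives and a plain "%s" with a string-literal argument.  Guarded out
   so it does not affect this translation unit.  */
#if 0
void
sprintf_chk_example (void)
{
  char buf[4];
  /* The format contains no %, so its length (5) plus the nul exceeds the
     destination size of 4 and is diagnosed.  */
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 1), "hello");
  /* The format is exactly "%s" and the argument is a literal, so the same
     reasoning applies to "world".  */
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 1),
                           "%s", "world");
}
#endif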
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
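/* Illustrative sketch (hypothetical user code, not part of GCC): calls the
   warning above is meant to catch, i.e. passing the address of a non-heap
   object to free.  Guarded out so it does not affect this translation
   unit.  */
#if 0
#include <stdlib.h>

void
free_nonheap_example (void)
{
  int local;
  static int file_scope;
  free (&local);       /* warned: -Wfree-nonheap-object */
  free (&file_scope);  /* warned: -Wfree-nonheap-object */
}
#endif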
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
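/* Illustrative sketch (hypothetical user code, not part of GCC): what the
   folding above computes.  Types 0 and 1 report the maximum remaining size
   and fall back to (size_t)-1 when nothing is known; types 2 and 3 report
   the minimum and fall back to 0.  Guarded out so it does not affect this
   translation unit.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

int
main (void)
{
  char buf[32];
  printf ("%zu\n", __builtin_object_size (buf, 0));      /* 32 */
  printf ("%zu\n", __builtin_object_size (buf + 8, 0));  /* 24 */

  char *p = malloc (rand () % 64);
  printf ("%zu\n", __builtin_object_size (p, 0));        /* (size_t)-1 */
  printf ("%zu\n", __builtin_object_size (p, 2));        /* 0 */
  free (p);
  return 0;
}
#endif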
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
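/* Illustrative sketch (hypothetical user code, not part of GCC): the
   variadic builtin handled above.  __builtin_fpclassify takes the five
   classification values first, then the operand; with a constant operand
   the call folds to one of them.  Guarded out so it does not affect this
   translation unit.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  /* The operand is a REAL_CST, so this folds to FP_NORMAL at compile
     time.  */
  int c = __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                FP_SUBNORMAL, FP_ZERO, 2.0);
  printf ("%d\n", c == FP_NORMAL);
  return 0;
}
#endif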
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
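/* Illustrative sketch (hypothetical user code, not part of GCC): the library
   call whose constant folding is implemented above.  This assumes a C99
   remquo; with both operands constant the remainder and the quotient bits
   can be computed at translation time.  Guarded out so it does not affect
   this translation unit.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  int quo;
  /* 29.0 / 3.0 rounds to the nearest integer 10, so the remainder is
     29.0 - 10 * 3.0 = -1.0 and QUO carries the sign and low bits of 10.  */
  double rem = remquo (29.0, 3.0, &quo);
  printf ("%g %d\n", rem, quo);
  return 0;
}
#endif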
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}
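/* Illustrative sketch (hypothetical user code, not part of GCC): the
   reentrant lgamma variant the folding above targets, assuming a
   glibc-style lgamma_r declaration is visible.  The sign of Gamma(x) comes
   back through the int pointer instead of the global signgam.  Guarded out
   so it does not affect this translation unit.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  int sign;
  /* Gamma(0.5) = sqrt(pi) > 0, so SIGN is set to 1 and the result is
     log(sqrt(pi)).  With a constant argument the call can fold to a
     COMPOUND_EXPR that stores SIGN and yields the log-gamma value.  */
  double lg = lgamma_r (0.5, &sign);
  printf ("%g %d\n", lg, sign);
  return 0;
}
#endif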
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
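/* Illustrative sketch (hypothetical user code, not part of GCC): a
   caller-visible effect of the helper above, which is used, for example,
   to fold arithmetic on complex floating-point constants through MPC.
   Guarded out so it does not affect this translation unit.  */
#if 0
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  /* (1+2i) * (3+4i) = -5 + 10i; with both operands constant the product
     can be computed at translation time and emitted as a COMPLEX_CST.  */
  double complex z = (1.0 + 2.0 * I) * (3.0 + 4.0 * I);
  printf ("%g %g\n", creal (z), cimag (z));
  return 0;
}
#endif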
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

static bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);