/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "stringpool.h"
#include "tree-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;

struct target_builtins *this_target_builtins = &default_target_builtins;

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
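
/* For instance (illustrative only): for a 4-byte field that lives at byte
   offset 2 within an object whose start is known to be 8-byte aligned,
   this would store M = 64 bits in *ALIGNP and N = 16 bits in *BITPOSP,
   since the field's address minus 16 bits is divisible by 64 bits.  */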
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */
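
/* For example, with ELTSIZE == 4 (a 32-bit wide string) and the bytes of
   L"ab\0cd" as PTR, the result is 2, the index of the first all-zero
   element.  (Illustrative.)  */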
static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
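
/* For example, c_strlen of the string constant "hello" yields
   ssize_int (5), while c_strlen of i++ ? "foo" : "bar" with
   ONLY_VALUE == 0 yields NULL_TREE, because expanding just the length
   would lose the side effect of i++.  (Illustrative.)  */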
tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */
      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
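
/* For example, on a typical 32-bit little-endian target
   c_readstr ("abcd", SImode) would produce the constant 0x64636261, so
   that storing it back to memory reproduces the bytes 'a' 'b' 'c' 'd'.
   (Illustrative; the exact value depends on BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN.)  */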
static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
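
/* For example, __builtin_return_address (0) reads the return address of
   the current frame, while __builtin_frame_address (2) follows the dynamic
   chain twice and yields the frame address two levels up.  (Illustrative;
   targets may apply FRAME_ADDR_RTX or RETURN_ADDR_RTX adjustments.)  */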
static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */
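
/* The buffer layout produced below is (illustrative):
     word 0:	  the frame value (targetm.builtin_setjmp_frame_value)
     word 1:	  the address of RECEIVER_LABEL
     word 2 and up: the machine-dependent stack save area.  */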
void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
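
/* For example, a call such as
     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   checks that EXP has exactly a pointer argument followed by an integer
   argument.  (Illustrative; see the uses further down in this file.)  */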
static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
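
/* For example, __builtin_prefetch (p, 1, 3) requests a prefetch of *p for
   writing with maximum temporal locality, and plain __builtin_prefetch (p)
   behaves like __builtin_prefetch (p, 0, 3).  (Illustrative, matching the
   argument defaults handled below.)  */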
static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
1360 #define apply_args_mode \
1361 (this_target_builtins->x_apply_args_mode)
1362 #define apply_result_mode \
1363 (this_target_builtins->x_apply_result_mode)
1365 /* Return the size required for the block returned by __builtin_apply_args,
1366 and initialize apply_args_mode. */
static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
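
/* A typical (illustrative) use together with __builtin_apply and
   __builtin_return, e.g. in a wrapper that forwards its arguments:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   where target_fn and the argument-block size 64 are caller-chosen and
   purely hypothetical here.  */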
static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}
1819 /* Expand a call EXP to __builtin_classify_type. */
1822 expand_builtin_classify_type (tree exp
)
1824 if (call_expr_nargs (exp
))
1825 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1826 return GEN_INT (no_type_class
);
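
/* Illustrative sketch, not part of the original source: how the
   classification above is visible to user code.  Both calls below fold to
   integer constants drawn from the type_class enum in typeclass.h.  */
#if 0
int
classify_examples (void)
{
  int rc = __builtin_classify_type (3.14);   /* real_type_class */
  int pc = __builtin_classify_type ("abc");  /* pointer_type_class: the
                                                array argument decays.  */
  return rc + pc;
}
#endif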
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F; \
  fcodel = BUILT_IN_##MATHFN##L; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F; \
  fcodel = BUILT_IN_##MATHFN##L; fcodef16 = BUILT_IN_##MATHFN##F16; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64; \
  fcodef128 = BUILT_IN_##MATHFN##F128; fcodef32x = BUILT_IN_##MATHFN##F32X; \
  fcodef64x = BUILT_IN_##MATHFN##F64X; fcodef128x = BUILT_IN_##MATHFN##F128X; \
  break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R; \
  fcodel = BUILT_IN_##MATHFN##L_R; break;
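
/* Illustrative sketch, not part of the original source: inside the switch in
   mathfn_built_in_2 below, CASE_MATHFN (SQRT) expands roughly to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   and CASE_MATHFN_FLOATN (SQRT) additionally fills in fcodef16 through
   fcodef128x with the BUILT_IN_SQRTF16 ... BUILT_IN_SQRTF128X codes.  */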
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LROUND)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)

    default:
      return END_BUILTINS;
    }

  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}

/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return NULL_TREE.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
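
/* Illustrative sketch, not part of the original source: a typical use of the
   lookup above.  Callers ask for the builtin declaration that matches an
   argument's type, e.g. the float variant of sqrt.  */
#if 0
  tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);
  /* FN is the implicit declaration of BUILT_IN_SQRTF, or NULL_TREE if the
     front end has not made sqrtf implicitly available.  */
#endif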
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internal-fn.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
        return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}

/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
        {
          tree_pair types = direct_internal_fn_types (ifn, call);
          optimization_type opt_type = bb_optimization_type (gimple_bb (call));
          if (direct_internal_fn_supported_p (ifn, types, opt_type))
            return ifn;
        }
    }
  return IFN_LAST;
}
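
/* Illustrative sketch, not part of the original source: how a GIMPLE pass
   might consult replacement_internal_fn, keeping the original builtin call
   whenever IFN_LAST comes back.  */
#if 0
  if (gcall *call = dyn_cast <gcall *> (stmt))
    {
      internal_fn ifn = replacement_internal_fn (call);
      if (ifn != IFN_LAST)
        {
          /* Rewrite the call to use IFN here; any errno side-effect of the
             original builtin must be accounted for by this caller.  */
        }
    }
#endif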
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */
2100 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
2102 optab builtin_optab
;
2103 rtx op0
, op1
, op2
, result
;
2105 tree fndecl
= get_callee_fndecl (exp
);
2106 tree arg0
, arg1
, arg2
;
2109 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2112 arg0
= CALL_EXPR_ARG (exp
, 0);
2113 arg1
= CALL_EXPR_ARG (exp
, 1);
2114 arg2
= CALL_EXPR_ARG (exp
, 2);
2116 switch (DECL_FUNCTION_CODE (fndecl
))
2118 CASE_FLT_FN (BUILT_IN_FMA
):
2119 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
2120 builtin_optab
= fma_optab
; break;
2125 /* Make a suitable register to place result in. */
2126 mode
= TYPE_MODE (TREE_TYPE (exp
));
2128 /* Before working hard, check whether the instruction is available. */
2129 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2132 result
= gen_reg_rtx (mode
);
2134 /* Always stabilize the argument list. */
2135 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2136 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2137 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2139 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2140 op1
= expand_normal (arg1
);
2141 op2
= expand_normal (arg2
);
2145 /* Compute into RESULT.
2146 Set RESULT to wherever the result comes back. */
2147 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
2150 /* If we were unable to expand via the builtin, stop the sequence
2151 (without outputting the insns) and call to the library function
2152 with the stabilized argument list. */
2156 return expand_call (exp
, target
, target
== const0_rtx
);
2159 /* Output the entire sequence. */
2160 insns
= get_insns ();
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */
2175 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2177 optab builtin_optab
;
2180 tree fndecl
= get_callee_fndecl (exp
);
2184 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2187 arg
= CALL_EXPR_ARG (exp
, 0);
2189 switch (DECL_FUNCTION_CODE (fndecl
))
2191 CASE_FLT_FN (BUILT_IN_SIN
):
2192 CASE_FLT_FN (BUILT_IN_COS
):
2193 builtin_optab
= sincos_optab
; break;
2198 /* Make a suitable register to place result in. */
2199 mode
= TYPE_MODE (TREE_TYPE (exp
));
2201 /* Check if sincos insn is available, otherwise fallback
2202 to sin or cos insn. */
2203 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2204 switch (DECL_FUNCTION_CODE (fndecl
))
2206 CASE_FLT_FN (BUILT_IN_SIN
):
2207 builtin_optab
= sin_optab
; break;
2208 CASE_FLT_FN (BUILT_IN_COS
):
2209 builtin_optab
= cos_optab
; break;
2214 /* Before working hard, check whether the instruction is available. */
2215 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2217 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2222 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2224 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2228 /* Compute into RESULT.
2229 Set RESULT to wherever the result comes back. */
2230 if (builtin_optab
== sincos_optab
)
2234 switch (DECL_FUNCTION_CODE (fndecl
))
2236 CASE_FLT_FN (BUILT_IN_SIN
):
2237 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2239 CASE_FLT_FN (BUILT_IN_COS
):
2240 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2248 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2252 /* Output the entire sequence. */
2253 insns
= get_insns ();
2259 /* If we were unable to expand via the builtin, stop the sequence
2260 (without outputting the insns) and call to the library function
2261 with the stabilized argument list. */
2265 return expand_call (exp
, target
, target
== const0_rtx
);
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */
2272 static enum insn_code
2273 interclass_mathfn_icode (tree arg
, tree fndecl
)
2275 bool errno_set
= false;
2276 optab builtin_optab
= unknown_optab
;
2279 switch (DECL_FUNCTION_CODE (fndecl
))
2281 CASE_FLT_FN (BUILT_IN_ILOGB
):
2282 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2283 CASE_FLT_FN (BUILT_IN_ISINF
):
2284 builtin_optab
= isinf_optab
; break;
2285 case BUILT_IN_ISNORMAL
:
2286 case BUILT_IN_ISFINITE
:
2287 CASE_FLT_FN (BUILT_IN_FINITE
):
2288 case BUILT_IN_FINITED32
:
2289 case BUILT_IN_FINITED64
:
2290 case BUILT_IN_FINITED128
:
2291 case BUILT_IN_ISINFD32
:
2292 case BUILT_IN_ISINFD64
:
2293 case BUILT_IN_ISINFD128
:
2294 /* These builtins have no optabs (yet). */
2300 /* There's no easy way to detect the case we need to set EDOM. */
2301 if (flag_errno_math
&& errno_set
)
2302 return CODE_FOR_nothing
;
2304 /* Optab mode depends on the mode of the input argument. */
2305 mode
= TYPE_MODE (TREE_TYPE (arg
));
2308 return optab_handler (builtin_optab
, mode
);
2309 return CODE_FOR_nothing
;
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */
2320 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2322 enum insn_code icode
= CODE_FOR_nothing
;
2324 tree fndecl
= get_callee_fndecl (exp
);
2328 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2331 arg
= CALL_EXPR_ARG (exp
, 0);
2332 icode
= interclass_mathfn_icode (arg
, fndecl
);
2333 mode
= TYPE_MODE (TREE_TYPE (arg
));
2335 if (icode
!= CODE_FOR_nothing
)
2337 struct expand_operand ops
[1];
2338 rtx_insn
*last
= get_last_insn ();
2339 tree orig_arg
= arg
;
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2344 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2346 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2348 if (mode
!= GET_MODE (op0
))
2349 op0
= convert_to_mode (mode
, op0
, 0);
2351 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2352 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2353 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2354 return ops
[0].value
;
2356 delete_insns_since (last
);
2357 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2363 /* Expand a call to the builtin sincos math function.
2364 Return NULL_RTX if a normal call should be emitted rather than expanding the
2365 function in-line. EXP is the expression that is a call to the builtin
2369 expand_builtin_sincos (tree exp
)
2371 rtx op0
, op1
, op2
, target1
, target2
;
2373 tree arg
, sinp
, cosp
;
2375 location_t loc
= EXPR_LOCATION (exp
);
2376 tree alias_type
, alias_off
;
2378 if (!validate_arglist (exp
, REAL_TYPE
,
2379 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2382 arg
= CALL_EXPR_ARG (exp
, 0);
2383 sinp
= CALL_EXPR_ARG (exp
, 1);
2384 cosp
= CALL_EXPR_ARG (exp
, 2);
2386 /* Make a suitable register to place result in. */
2387 mode
= TYPE_MODE (TREE_TYPE (arg
));
2389 /* Check if sincos insn is available, otherwise emit the call. */
2390 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2393 target1
= gen_reg_rtx (mode
);
2394 target2
= gen_reg_rtx (mode
);
2396 op0
= expand_normal (arg
);
2397 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2398 alias_off
= build_int_cst (alias_type
, 0);
2399 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2401 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2404 /* Compute into target1 and target2.
2405 Set TARGET to wherever the result comes back. */
2406 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2407 gcc_assert (result
);
2409 /* Move target1 and target2 to the memory locations indicated
2411 emit_move_insn (op1
, target1
);
2412 emit_move_insn (op2
, target2
);
2417 /* Expand a call to the internal cexpi builtin to the sincos math function.
2418 EXP is the expression that is a call to the builtin function; if convenient,
2419 the result should be placed in TARGET. */
2422 expand_builtin_cexpi (tree exp
, rtx target
)
2424 tree fndecl
= get_callee_fndecl (exp
);
2428 location_t loc
= EXPR_LOCATION (exp
);
2430 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2433 arg
= CALL_EXPR_ARG (exp
, 0);
2434 type
= TREE_TYPE (arg
);
2435 mode
= TYPE_MODE (TREE_TYPE (arg
));
2437 /* Try expanding via a sincos optab, fall back to emitting a libcall
2438 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2439 is only generated from sincos, cexp or if we have either of them. */
2440 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2442 op1
= gen_reg_rtx (mode
);
2443 op2
= gen_reg_rtx (mode
);
2445 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2447 /* Compute into op1 and op2. */
2448 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2450 else if (targetm
.libc_has_function (function_sincos
))
2452 tree call
, fn
= NULL_TREE
;
2456 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2457 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2458 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2459 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2460 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2461 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2465 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2466 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2467 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2468 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2469 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2470 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2472 /* Make sure not to fold the sincos call again. */
2473 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2474 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2475 call
, 3, arg
, top1
, top2
));
2479 tree call
, fn
= NULL_TREE
, narg
;
2480 tree ctype
= build_complex_type (type
);
2482 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2483 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2484 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2485 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2486 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2487 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2491 /* If we don't have a decl for cexp create one. This is the
2492 friendliest fallback if the user calls __builtin_cexpi
2493 without full target C99 function support. */
2494 if (fn
== NULL_TREE
)
2497 const char *name
= NULL
;
2499 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2501 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2503 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2506 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2507 fn
= build_fn_decl (name
, fntype
);
2510 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2511 build_real (type
, dconst0
), arg
);
2513 /* Make sure not to fold the cexp call again. */
2514 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2515 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2516 target
, VOIDmode
, EXPAND_NORMAL
);
2519 /* Now build the proper return type. */
2520 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2521 make_tree (TREE_TYPE (arg
), op2
),
2522 make_tree (TREE_TYPE (arg
), op1
)),
2523 target
, VOIDmode
, EXPAND_NORMAL
);
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
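
/* Illustrative sketch, not part of the original source: the helper above is
   what the expanders below use when they have to fall back to a plain
   library call, e.g. rewriting a failed lfloor expansion into a call to
   floor (see expand_builtin_int_roundingfn):

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   The result is always a CALL_EXPR; no folding is applied to it.  */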
2545 /* Expand a call to one of the builtin rounding functions gcc defines
2546 as an extension (lfloor and lceil). As these are gcc extensions we
2547 do not need to worry about setting errno to EDOM.
2548 If expanding via optab fails, lower expression to (int)(floor(x)).
2549 EXP is the expression that is a call to the builtin function;
2550 if convenient, the result should be placed in TARGET. */
2553 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2555 convert_optab builtin_optab
;
2558 tree fndecl
= get_callee_fndecl (exp
);
2559 enum built_in_function fallback_fn
;
2560 tree fallback_fndecl
;
2564 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2567 arg
= CALL_EXPR_ARG (exp
, 0);
2569 switch (DECL_FUNCTION_CODE (fndecl
))
2571 CASE_FLT_FN (BUILT_IN_ICEIL
):
2572 CASE_FLT_FN (BUILT_IN_LCEIL
):
2573 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2574 builtin_optab
= lceil_optab
;
2575 fallback_fn
= BUILT_IN_CEIL
;
2578 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2579 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2580 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2581 builtin_optab
= lfloor_optab
;
2582 fallback_fn
= BUILT_IN_FLOOR
;
2589 /* Make a suitable register to place result in. */
2590 mode
= TYPE_MODE (TREE_TYPE (exp
));
2592 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2597 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2599 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2603 /* Compute into TARGET. */
2604 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2606 /* Output the entire sequence. */
2607 insns
= get_insns ();
2613 /* If we were unable to expand via the builtin, stop the sequence
2614 (without outputting the insns). */
2617 /* Fall back to floating point rounding optab. */
2618 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2620 /* For non-C99 targets we may end up without a fallback fndecl here
2621 if the user called __builtin_lfloor directly. In this case emit
2622 a call to the floor/ceil variants nevertheless. This should result
2623 in the best user experience for not full C99 targets. */
2624 if (fallback_fndecl
== NULL_TREE
)
2627 const char *name
= NULL
;
2629 switch (DECL_FUNCTION_CODE (fndecl
))
2631 case BUILT_IN_ICEIL
:
2632 case BUILT_IN_LCEIL
:
2633 case BUILT_IN_LLCEIL
:
2636 case BUILT_IN_ICEILF
:
2637 case BUILT_IN_LCEILF
:
2638 case BUILT_IN_LLCEILF
:
2641 case BUILT_IN_ICEILL
:
2642 case BUILT_IN_LCEILL
:
2643 case BUILT_IN_LLCEILL
:
2646 case BUILT_IN_IFLOOR
:
2647 case BUILT_IN_LFLOOR
:
2648 case BUILT_IN_LLFLOOR
:
2651 case BUILT_IN_IFLOORF
:
2652 case BUILT_IN_LFLOORF
:
2653 case BUILT_IN_LLFLOORF
:
2656 case BUILT_IN_IFLOORL
:
2657 case BUILT_IN_LFLOORL
:
2658 case BUILT_IN_LLFLOORL
:
2665 fntype
= build_function_type_list (TREE_TYPE (arg
),
2666 TREE_TYPE (arg
), NULL_TREE
);
2667 fallback_fndecl
= build_fn_decl (name
, fntype
);
2670 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2672 tmp
= expand_normal (exp
);
2673 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2675 /* Truncate the result of floating point optab to integer
2676 via expand_fix (). */
2677 target
= gen_reg_rtx (mode
);
2678 expand_fix (target
, tmp
, 0);
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */
2690 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2692 convert_optab builtin_optab
;
2695 tree fndecl
= get_callee_fndecl (exp
);
2698 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2700 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2703 arg
= CALL_EXPR_ARG (exp
, 0);
2705 switch (DECL_FUNCTION_CODE (fndecl
))
2707 CASE_FLT_FN (BUILT_IN_IRINT
):
2708 fallback_fn
= BUILT_IN_LRINT
;
2710 CASE_FLT_FN (BUILT_IN_LRINT
):
2711 CASE_FLT_FN (BUILT_IN_LLRINT
):
2712 builtin_optab
= lrint_optab
;
2715 CASE_FLT_FN (BUILT_IN_IROUND
):
2716 fallback_fn
= BUILT_IN_LROUND
;
2718 CASE_FLT_FN (BUILT_IN_LROUND
):
2719 CASE_FLT_FN (BUILT_IN_LLROUND
):
2720 builtin_optab
= lround_optab
;
2727 /* There's no easy way to detect the case we need to set EDOM. */
2728 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2731 /* Make a suitable register to place result in. */
2732 mode
= TYPE_MODE (TREE_TYPE (exp
));
2734 /* There's no easy way to detect the case we need to set EDOM. */
2735 if (!flag_errno_math
)
2737 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2742 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2744 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2748 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2750 /* Output the entire sequence. */
2751 insns
= get_insns ();
2757 /* If we were unable to expand via the builtin, stop the sequence
2758 (without outputting the insns) and call to the library function
2759 with the stabilized argument list. */
2763 if (fallback_fn
!= BUILT_IN_NONE
)
2765 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2766 targets, (int) round (x) should never be transformed into
2767 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2768 a call to lround in the hope that the target provides at least some
2769 C99 functions. This should result in the best user experience for
2770 not full C99 targets. */
2771 tree fallback_fndecl
= mathfn_built_in_1
2772 (TREE_TYPE (arg
), as_combined_fn (fallback_fn
), 0);
2774 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2775 fallback_fndecl
, 1, arg
);
2777 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2778 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2779 return convert_to_mode (mode
, target
, 0);
2782 return expand_call (exp
, target
, target
== const0_rtx
);
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX
   if a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
                                    target, LCT_CONST, mode,
                                    op0, mode, op1, mode2);

  return target;
}
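
/* Illustrative sketch, not part of the original source: at the source level
   the expansion above turns

     double d = __builtin_powi (x, n);

   into a call to the libgcc helper selected by optab_libfunc (powi_optab,
   DFmode) -- conventionally __powidf2 -- with the exponent converted to the
   mode of an int.  The helper name is an assumption about the usual libgcc
   configuration rather than something this file guarantees.  */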
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
2832 expand_builtin_strlen (tree exp
, rtx target
,
2833 machine_mode target_mode
)
2835 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2838 struct expand_operand ops
[4];
2841 tree src
= CALL_EXPR_ARG (exp
, 0);
2843 rtx_insn
*before_strlen
;
2844 machine_mode insn_mode
;
2845 enum insn_code icode
= CODE_FOR_nothing
;
2848 /* If the length can be computed at compile-time, return it. */
2849 len
= c_strlen (src
, 0);
2851 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2853 /* If the length can be computed at compile-time and is constant
2854 integer, but there are side-effects in src, evaluate
2855 src for side-effects, then return len.
2856 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2857 can be optimized into: i++; x = 3; */
2858 len
= c_strlen (src
, 1);
2859 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2861 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2862 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2865 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2867 /* If SRC is not a pointer type, don't do this operation inline. */
2871 /* Bail out if we can't compute strlen in the right mode. */
2872 FOR_EACH_MODE_FROM (insn_mode
, target_mode
)
2874 icode
= optab_handler (strlen_optab
, insn_mode
);
2875 if (icode
!= CODE_FOR_nothing
)
2878 if (insn_mode
== VOIDmode
)
2881 /* Make a place to hold the source address. We will not expand
2882 the actual source until we are sure that the expansion will
2883 not fail -- there are trees that cannot be expanded twice. */
2884 src_reg
= gen_reg_rtx (Pmode
);
2886 /* Mark the beginning of the strlen sequence so we can emit the
2887 source operand later. */
2888 before_strlen
= get_last_insn ();
2890 create_output_operand (&ops
[0], target
, insn_mode
);
2891 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
2892 create_integer_operand (&ops
[2], 0);
2893 create_integer_operand (&ops
[3], align
);
2894 if (!maybe_expand_insn (icode
, 4, ops
))
2897 /* Check to see if the argument was declared attribute nonstring
2898 and if so, issue a warning since at this point it's not known
2899 to be nul-terminated. */
2900 maybe_warn_nonstring_arg (get_callee_fndecl (exp
), exp
);
2902 /* Now that we are assured of success, expand the source. */
2904 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
2907 #ifdef POINTERS_EXTEND_UNSIGNED
2908 if (GET_MODE (pat
) != Pmode
)
2909 pat
= convert_to_mode (Pmode
, pat
,
2910 POINTERS_EXTEND_UNSIGNED
);
2912 emit_move_insn (src_reg
, pat
);
2918 emit_insn_after (pat
, before_strlen
);
2920 emit_insn_before (pat
, get_insns ());
2922 /* Return the value in the proper mode for this function. */
2923 if (GET_MODE (ops
[0].value
) == target_mode
)
2924 target
= ops
[0].value
;
2925 else if (target
!= 0)
2926 convert_move (target
, ops
[0].value
, 0);
2928 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         scalar_int_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
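
/* Illustrative sketch, not part of the original source: the callback above is
   handed to can_store_by_pieces/store_by_pieces together with the constant
   source string (see expand_builtin_memory_copy_args below), e.g.

     can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
                          CONST_CAST (char *, src_str), dest_align, false)

   Each invocation hands back the next GET_MODE_SIZE (mode) bytes of the
   string as an rtx constant via c_readstr.  */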
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then we
   set it into PROBABLE_MAX_SIZE.  */
2956 determine_block_size (tree len
, rtx len_rtx
,
2957 unsigned HOST_WIDE_INT
*min_size
,
2958 unsigned HOST_WIDE_INT
*max_size
,
2959 unsigned HOST_WIDE_INT
*probable_max_size
)
2961 if (CONST_INT_P (len_rtx
))
2963 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
2969 enum value_range_type range_type
= VR_UNDEFINED
;
2971 /* Determine bounds from the type. */
2972 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
2973 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
2976 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
2977 *probable_max_size
= *max_size
2978 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
2980 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
2982 if (TREE_CODE (len
) == SSA_NAME
)
2983 range_type
= get_range_info (len
, &min
, &max
);
2984 if (range_type
== VR_RANGE
)
2986 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
2987 *min_size
= min
.to_uhwi ();
2988 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
2989 *probable_max_size
= *max_size
= max
.to_uhwi ();
2991 else if (range_type
== VR_ANTI_RANGE
)
      /* An anti range 0...N lets us determine the minimal size as N+1.  */
2996 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
2997 *min_size
= max
.to_uhwi () + 1;
3005 Produce anti range allowing negative values of N. We still
3006 can use the information and make a guess that N is not negative.
3008 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
3009 *probable_max_size
= min
.to_uhwi () - 1;
3012 gcc_checking_assert (*max_size
<=
3013 (unsigned HOST_WIDE_INT
)
3014 GET_MODE_MASK (GET_MODE (len_rtx
)));
3017 /* Try to verify that the sizes and lengths of the arguments to a string
3018 manipulation function given by EXP are within valid bounds and that
3019 the operation does not lead to buffer overflow or read past the end.
3020 Arguments other than EXP may be null. When non-null, the arguments
3021 have the following meaning:
3022 DST is the destination of a copy call or NULL otherwise.
3023 SRC is the source of a copy call or NULL otherwise.
3024 DSTWRITE is the number of bytes written into the destination obtained
3025 from the user-supplied size argument to the function (such as in
   memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3027 MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3029 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3030 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3031 expression EXP is a string function call (as opposed to a memory call
3032 like memcpy). As an exception, SRCSTR can also be an integer denoting
3033 the precomputed size of the source string or object (for functions like
3035 DSTSIZE is the size of the destination object specified by the last
3036 argument to the _chk builtins, typically resulting from the expansion
3037 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3040 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3043 If the call is successfully verified as safe return true, otherwise
3047 check_access (tree exp
, tree
, tree
, tree dstwrite
,
3048 tree maxread
, tree srcstr
, tree dstsize
)
3050 int opt
= OPT_Wstringop_overflow_
;
3052 /* The size of the largest object is half the address space, or
3053 PTRDIFF_MAX. (This is way too permissive.) */
3054 tree maxobjsize
= max_object_size ();
3056 /* Either the length of the source string for string functions or
3057 the size of the source object for raw memory functions. */
3058 tree slen
= NULL_TREE
;
3060 tree range
[2] = { NULL_TREE
, NULL_TREE
};
3062 /* Set to true when the exact number of bytes written by a string
3063 function like strcpy is not known and the only thing that is
3064 known is that it must be at least one (for the terminating nul). */
3065 bool at_least_one
= false;
3068 /* SRCSTR is normally a pointer to string but as a special case
3069 it can be an integer denoting the length of a string. */
3070 if (POINTER_TYPE_P (TREE_TYPE (srcstr
)))
3072 /* Try to determine the range of lengths the source string
3073 refers to. If it can be determined and is less than
3074 the upper bound given by MAXREAD add one to it for
3075 the terminating nul. Otherwise, set it to one for
3076 the same reason, or to MAXREAD as appropriate. */
3077 get_range_strlen (srcstr
, range
);
3078 if (range
[0] && (!maxread
|| TREE_CODE (maxread
) == INTEGER_CST
))
3080 if (maxread
&& tree_int_cst_le (maxread
, range
[0]))
3081 range
[0] = range
[1] = maxread
;
3083 range
[0] = fold_build2 (PLUS_EXPR
, size_type_node
,
3084 range
[0], size_one_node
);
3086 if (maxread
&& tree_int_cst_le (maxread
, range
[1]))
3088 else if (!integer_all_onesp (range
[1]))
3089 range
[1] = fold_build2 (PLUS_EXPR
, size_type_node
,
3090 range
[1], size_one_node
);
3096 at_least_one
= true;
3097 slen
= size_one_node
;
3104 if (!dstwrite
&& !maxread
)
3106 /* When the only available piece of data is the object size
3107 there is nothing to do. */
3111 /* Otherwise, when the length of the source sequence is known
3112 (as with strlen), set DSTWRITE to it. */
3118 dstsize
= maxobjsize
;
3121 get_size_range (dstwrite
, range
);
3123 tree func
= get_callee_fndecl (exp
);
3125 /* First check the number of bytes to be written against the maximum
3127 if (range
[0] && tree_int_cst_lt (maxobjsize
, range
[0]))
3129 location_t loc
= tree_nonartificial_location (exp
);
3130 loc
= expansion_point_location_if_in_system_header (loc
);
3132 if (range
[0] == range
[1])
3133 warning_at (loc
, opt
,
3134 "%K%qD specified size %E "
3135 "exceeds maximum object size %E",
3136 exp
, func
, range
[0], maxobjsize
);
3138 warning_at (loc
, opt
,
3139 "%K%qD specified size between %E and %E "
3140 "exceeds maximum object size %E",
3142 range
[0], range
[1], maxobjsize
);
3146 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3147 constant, and in range of unsigned HOST_WIDE_INT. */
3148 bool exactwrite
= dstwrite
&& tree_fits_uhwi_p (dstwrite
);
3150 /* Next check the number of bytes to be written against the destination
3152 if (range
[0] || !exactwrite
|| integer_all_onesp (dstwrite
))
3155 && ((tree_fits_uhwi_p (dstsize
)
3156 && tree_int_cst_lt (dstsize
, range
[0]))
3157 || (tree_fits_uhwi_p (dstwrite
)
3158 && tree_int_cst_lt (dstwrite
, range
[0]))))
3160 if (TREE_NO_WARNING (exp
))
3163 location_t loc
= tree_nonartificial_location (exp
);
3164 loc
= expansion_point_location_if_in_system_header (loc
);
3166 if (dstwrite
== slen
&& at_least_one
)
3168 /* This is a call to strcpy with a destination of 0 size
3169 and a source of unknown length. The call will write
3170 at least one byte past the end of the destination. */
3171 warning_at (loc
, opt
,
3172 "%K%qD writing %E or more bytes into a region "
3173 "of size %E overflows the destination",
3174 exp
, func
, range
[0], dstsize
);
3176 else if (tree_int_cst_equal (range
[0], range
[1]))
3177 warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3178 "%K%qD writing %E byte into a region "
3179 "of size %E overflows the destination",
3180 "%K%qD writing %E bytes into a region "
3181 "of size %E overflows the destination",
3182 exp
, func
, range
[0], dstsize
);
3183 else if (tree_int_cst_sign_bit (range
[1]))
3185 /* Avoid printing the upper bound if it's invalid. */
3186 warning_at (loc
, opt
,
3187 "%K%qD writing %E or more bytes into a region "
3188 "of size %E overflows the destination",
3189 exp
, func
, range
[0], dstsize
);
3192 warning_at (loc
, opt
,
3193 "%K%qD writing between %E and %E bytes into "
3194 "a region of size %E overflows the destination",
3195 exp
, func
, range
[0], range
[1],
3198 /* Return error when an overflow has been detected. */
3203 /* Check the maximum length of the source sequence against the size
3204 of the destination object if known, or against the maximum size
3208 get_size_range (maxread
, range
);
3210 /* Use the lower end for MAXREAD from now on. */
3214 if (range
[0] && dstsize
&& tree_fits_uhwi_p (dstsize
))
3216 location_t loc
= tree_nonartificial_location (exp
);
3217 loc
= expansion_point_location_if_in_system_header (loc
);
3219 if (tree_int_cst_lt (maxobjsize
, range
[0]))
3221 if (TREE_NO_WARNING (exp
))
3224 /* Warn about crazy big sizes first since that's more
3225 likely to be meaningful than saying that the bound
3226 is greater than the object size if both are big. */
3227 if (range
[0] == range
[1])
3228 warning_at (loc
, opt
,
3229 "%K%qD specified bound %E "
3230 "exceeds maximum object size %E",
3232 range
[0], maxobjsize
);
3234 warning_at (loc
, opt
,
3235 "%K%qD specified bound between %E and %E "
3236 "exceeds maximum object size %E",
3238 range
[0], range
[1], maxobjsize
);
3243 if (dstsize
!= maxobjsize
&& tree_int_cst_lt (dstsize
, range
[0]))
3245 if (TREE_NO_WARNING (exp
))
3248 if (tree_int_cst_equal (range
[0], range
[1]))
3249 warning_at (loc
, opt
,
3250 "%K%qD specified bound %E "
3251 "exceeds destination size %E",
3255 warning_at (loc
, opt
,
3256 "%K%qD specified bound between %E and %E "
3257 "exceeds destination size %E",
3259 range
[0], range
[1], dstsize
);
3265 /* Check for reading past the end of SRC. */
3268 && dstwrite
&& range
[0]
3269 && tree_int_cst_lt (slen
, range
[0]))
3271 if (TREE_NO_WARNING (exp
))
3274 location_t loc
= tree_nonartificial_location (exp
);
3276 if (tree_int_cst_equal (range
[0], range
[1]))
3277 warning_n (loc
, opt
, tree_to_uhwi (range
[0]),
3278 "%K%qD reading %E byte from a region of size %E",
3279 "%K%qD reading %E bytes from a region of size %E",
3280 exp
, func
, range
[0], slen
);
3281 else if (tree_int_cst_sign_bit (range
[1]))
3283 /* Avoid printing the upper bound if it's invalid. */
3284 warning_at (loc
, opt
,
3285 "%K%qD reading %E or more bytes from a region "
3287 exp
, func
, range
[0], slen
);
3290 warning_at (loc
, opt
,
3291 "%K%qD reading between %E and %E bytes from a region "
3293 exp
, func
, range
[0], range
[1], slen
);
3300 /* Helper to compute the size of the object referenced by the DEST
3301 expression which must have pointer type, using Object Size type
3302 OSTYPE (only the least significant 2 bits are used). Return
3303 an estimate of the size of the object if successful or NULL when
3304 the size cannot be determined. When the referenced object involves
3305 a non-constant offset in some range the returned value represents
3306 the largest size given the smallest non-negative offset in the
3307 range. The function is intended for diagnostics and should not
3308 be used to influence code generation or optimization. */
3311 compute_objsize (tree dest
, int ostype
)
3313 unsigned HOST_WIDE_INT size
;
3315 /* Only the two least significant bits are meaningful. */
3318 if (compute_builtin_object_size (dest
, ostype
, &size
))
3319 return build_int_cst (sizetype
, size
);
3321 if (TREE_CODE (dest
) == SSA_NAME
)
3323 gimple
*stmt
= SSA_NAME_DEF_STMT (dest
);
3324 if (!is_gimple_assign (stmt
))
3327 dest
= gimple_assign_rhs1 (stmt
);
3329 tree_code code
= gimple_assign_rhs_code (stmt
);
3330 if (code
== POINTER_PLUS_EXPR
)
          /* compute_builtin_object_size fails for addresses with
             non-constant offsets.  Try to determine the range of
             such an offset here and use it to adjust the constant
             size.  */
3336 tree off
= gimple_assign_rhs2 (stmt
);
3337 if (TREE_CODE (off
) == SSA_NAME
3338 && INTEGRAL_TYPE_P (TREE_TYPE (off
)))
3341 enum value_range_type rng
= get_range_info (off
, &min
, &max
);
3343 if (rng
== VR_RANGE
)
3345 if (tree size
= compute_objsize (dest
, ostype
))
3347 wide_int wisiz
= wi::to_wide (size
);
                      /* Ignore negative offsets for now.  For others,
                         use the lower bound as the most optimistic
                         estimate of the (remaining) size.  */
3352 if (wi::sign_mask (min
))
3354 else if (wi::ltu_p (min
, wisiz
))
3355 return wide_int_to_tree (TREE_TYPE (size
),
3356 wi::sub (wisiz
, min
));
3358 return size_zero_node
;
3363 else if (code
!= ADDR_EXPR
)
3367 /* Unless computing the largest size (for memcpy and other raw memory
3368 functions), try to determine the size of the object from its type. */
3372 if (TREE_CODE (dest
) != ADDR_EXPR
)
3375 tree type
= TREE_TYPE (dest
);
3376 if (TREE_CODE (type
) == POINTER_TYPE
)
3377 type
= TREE_TYPE (type
);
3379 type
= TYPE_MAIN_VARIANT (type
);
3381 if (TREE_CODE (type
) == ARRAY_TYPE
3382 && !array_at_struct_end_p (TREE_OPERAND (dest
, 0)))
3384 /* Return the constant size unless it's zero (that's a zero-length
3385 array likely at the end of a struct). */
3386 tree size
= TYPE_SIZE_UNIT (type
);
3387 if (size
&& TREE_CODE (size
) == INTEGER_CST
3388 && !integer_zerop (size
))
/* Helper to determine and check the sizes of the source and the destination
   of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls.  EXP is the
   call expression, DEST is the destination argument, SRC is the source
   argument or null, and LEN is the number of bytes.  Use Object Size type-0
   regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
   (no overflow or invalid sizes), false otherwise.  */

static bool
check_memop_access (tree exp, tree dest, tree src, tree size)
{
  /* For functions like memset and memcpy that operate on raw memory
     try to determine the size of the largest source and destination
     object using type-0 Object Size regardless of the object size
     type specified by the option.  */
  tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
  tree dstsize = compute_objsize (dest, 0);

  return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
                       srcsize, dstsize);
}

/* Validate memchr arguments without performing any expansion.
   Return NULL_RTX.  */

static rtx
expand_builtin_memchr (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size
     of the object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
                    /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
    }

  return NULL_RTX;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return expand_builtin_memory_copy_args (dest, src, len, target, exp,
                                          /*endp=*/ 0);
}

/* Check a call EXP to the memmove built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_memmove (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, src, len);

  return NULL_RTX;
}
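
/* Illustrative sketch, not part of the original source: the effect of the two
   entry points above on user code.  */
#if 0
  char buf[16];
  /* Expanded inline (or via a block-move insn) when profitable; the sizes
     are first checked by check_memop_access.  */
  __builtin_memcpy (buf, "abc", 4);
  /* memmove is only validated here and is left as a library call.  */
  __builtin_memmove (buf, buf + 1, 3);
#endif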
3482 /* Expand an instrumented call EXP to the memcpy builtin.
3483 Return NULL_RTX if we failed, the caller should emit a normal call,
3484 otherwise try to get the result in TARGET, if convenient (and in
3485 mode MODE if that's convenient). */
3488 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
3490 if (!validate_arglist (exp
,
3491 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3492 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3493 INTEGER_TYPE
, VOID_TYPE
))
3497 tree dest
= CALL_EXPR_ARG (exp
, 0);
3498 tree src
= CALL_EXPR_ARG (exp
, 2);
3499 tree len
= CALL_EXPR_ARG (exp
, 4);
3500 rtx res
= expand_builtin_memory_copy_args (dest
, src
, len
, target
, exp
,
3503 /* Return src bounds with the result. */
3506 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3507 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3508 res
= chkp_join_splitted_slot (res
, bnd
);
3514 /* Expand a call EXP to the mempcpy builtin.
3515 Return NULL_RTX if we failed; the caller should emit a normal call,
3516 otherwise try to get the result in TARGET, if convenient (and in
3517 mode MODE if that's convenient). If ENDP is 0 return the
3518 destination pointer, if ENDP is 1 return the end pointer ala
3519 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3523 expand_builtin_mempcpy (tree exp
, rtx target
)
3525 if (!validate_arglist (exp
,
3526 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3529 tree dest
= CALL_EXPR_ARG (exp
, 0);
3530 tree src
= CALL_EXPR_ARG (exp
, 1);
3531 tree len
= CALL_EXPR_ARG (exp
, 2);
3533 /* Policy does not generally allow using compute_objsize (which
3534 is used internally by check_memop_size) to change code generation
3535 or drive optimization decisions.
3537 In this instance it is safe because the code we generate has
3538 the same semantics regardless of the return value of
3539 check_memop_sizes. Exactly the same amount of data is copied
3540 and the return value is exactly the same in both cases.
3542 Furthermore, check_memop_size always uses mode 0 for the call to
3543 compute_objsize, so the imprecise nature of compute_objsize is
3546 /* Avoid expanding mempcpy into memcpy when the call is determined
3547 to overflow the buffer. This also prevents the same overflow
3548 from being diagnosed again when expanding memcpy. */
3549 if (!check_memop_access (exp
, dest
, src
, len
))
3552 return expand_builtin_mempcpy_args (dest
, src
, len
,
3553 target
, exp
, /*endp=*/ 1);
3556 /* Expand an instrumented call EXP to the mempcpy builtin.
3557 Return NULL_RTX if we failed, the caller should emit a normal call,
3558 otherwise try to get the result in TARGET, if convenient (and in
3559 mode MODE if that's convenient). */
3562 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
)
3564 if (!validate_arglist (exp
,
3565 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3566 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3567 INTEGER_TYPE
, VOID_TYPE
))
3571 tree dest
= CALL_EXPR_ARG (exp
, 0);
3572 tree src
= CALL_EXPR_ARG (exp
, 2);
3573 tree len
= CALL_EXPR_ARG (exp
, 4);
3574 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3577 /* Return src bounds with the result. */
3580 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3581 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3582 res
= chkp_join_splitted_slot (res
, bnd
);
/* Helper function to do the actual work for expansion of the memory copy
   family of functions (memcpy, mempcpy, stpcpy).  Expanding should assign
   LEN bytes of memory from SRC to DEST and assign to TARGET if convenient.
   If ENDP is 0 return the destination pointer, if ENDP is 1 return the end
   pointer ala mempcpy, and if ENDP is 2 return the end pointer minus one
   ala stpcpy.  */
3597 expand_builtin_memory_copy_args (tree dest
, tree src
, tree len
,
3598 rtx target
, tree exp
, int endp
)
3600 const char *src_str
;
3601 unsigned int src_align
= get_pointer_alignment (src
);
3602 unsigned int dest_align
= get_pointer_alignment (dest
);
3603 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3604 HOST_WIDE_INT expected_size
= -1;
3605 unsigned int expected_align
= 0;
3606 unsigned HOST_WIDE_INT min_size
;
3607 unsigned HOST_WIDE_INT max_size
;
3608 unsigned HOST_WIDE_INT probable_max_size
;
3610 /* If DEST is not a pointer type, call the normal function. */
3611 if (dest_align
== 0)
3614 /* If either SRC is not a pointer type, don't do this
3615 operation in-line. */
3619 if (currently_expanding_gimple_stmt
)
3620 stringop_block_profile (currently_expanding_gimple_stmt
,
3621 &expected_align
, &expected_size
);
3623 if (expected_align
< dest_align
)
3624 expected_align
= dest_align
;
3625 dest_mem
= get_memory_rtx (dest
, len
);
3626 set_mem_align (dest_mem
, dest_align
);
3627 len_rtx
= expand_normal (len
);
3628 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3629 &probable_max_size
);
3630 src_str
= c_getstr (src
);
  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
3636 && CONST_INT_P (len_rtx
)
3637 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3638 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3639 CONST_CAST (char *, src_str
),
3642 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3643 builtin_memcpy_read_str
,
3644 CONST_CAST (char *, src_str
),
3645 dest_align
, false, endp
);
3646 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3647 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3651 src_mem
= get_memory_rtx (src
, len
);
3652 set_mem_align (src_mem
, src_align
);
3654 /* Copy word part most expediently. */
3655 enum block_op_methods method
= BLOCK_OP_NORMAL
;
3656 if (CALL_EXPR_TAILCALL (exp
) && (endp
== 0 || target
== const0_rtx
))
3657 method
= BLOCK_OP_TAILCALL
;
3658 if (endp
== 1 && target
!= const0_rtx
)
3659 method
= BLOCK_OP_NO_LIBCALL_RET
;
3660 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
, method
,
3661 expected_align
, expected_size
,
3662 min_size
, max_size
, probable_max_size
);
3663 if (dest_addr
== pc_rtx
)
3668 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
3669 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3672 if (endp
&& target
!= const0_rtx
)
3674 dest_addr
= gen_rtx_PLUS (ptr_mode
, dest_addr
, len_rtx
);
3675 /* stpcpy pointer to last byte. */
3677 dest_addr
= gen_rtx_MINUS (ptr_mode
, dest_addr
, const1_rtx
);
3684 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3685 rtx target
, tree orig_exp
, int endp
)
3687 return expand_builtin_memory_copy_args (dest
, src
, len
, target
, orig_exp
,
3691 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3692 we failed, the caller should emit a normal call, otherwise try to
3693 get the result in TARGET, if convenient. If ENDP is 0 return the
3694 destination pointer, if ENDP is 1 return the end pointer ala
3695 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3699 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3701 struct expand_operand ops
[3];
3705 if (!targetm
.have_movstr ())
3708 dest_mem
= get_memory_rtx (dest
, NULL
);
3709 src_mem
= get_memory_rtx (src
, NULL
);
3712 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3713 dest_mem
= replace_equiv_address (dest_mem
, target
);
3716 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3717 create_fixed_operand (&ops
[1], dest_mem
);
3718 create_fixed_operand (&ops
[2], src_mem
);
3719 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3722 if (endp
&& target
!= const0_rtx
)
3724 target
= ops
[0].value
;
3725 /* movstr is supposed to set end to the address of the NUL
3726 terminator. If the caller requested a mempcpy-like return value,
3730 rtx tem
= plus_constant (GET_MODE (target
),
3731 gen_lowpart (GET_MODE (target
), target
), 1);
3732 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp, rtx)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
                destsize);

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
      check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
                    src, destsize);
    }

  if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
    {
      /* Check to see if the argument was declared attribute nonstring
         and if so, issue a warning since at this point it's not known
         to be nul-terminated.  */
      tree fndecl = get_callee_fndecl (exp);
      maybe_warn_nonstring_arg (fndecl, exp);
      return ret;
    }

  return NULL_RTX;
}
/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
                    src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
         compile-time, not an expression containing a string.  This is
         because the latter will potentially produce pessimized code
         when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
        return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
                                         target, exp, /*endp=*/2);

      if (ret)
        return ret;

      if (TREE_CODE (len) == INTEGER_CST)
        {
          rtx len_rtx = expand_normal (len);

          if (CONST_INT_P (len_rtx))
            {
              ret = expand_builtin_strcpy_args (dst, src, target);

              if (ret)
                {
                  if (! target)
                    {
                      if (mode != VOIDmode)
                        target = gen_reg_rtx (mode);
                      else
                        target = gen_reg_rtx (GET_MODE (ret));
                    }
                  if (GET_MODE (target) != GET_MODE (ret))
                    ret = gen_lowpart (GET_MODE (target), ret);
                  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
                  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
                  gcc_assert (ret);

                  return target;
                }
            }
        }

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
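/* For example, with char buf[8], stpcpy (buf, "hi") stores three bytes and
   returns buf + 2, the address of the copied NUL, whereas strcpy returns
   buf; that difference is why an ignored return value allows the call to
   be rewritten as strcpy above.  */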
/* Check a call EXP to the stpncpy built-in for validity.
   Return NULL_RTX on both success and failure.  */

static rtx
expand_builtin_stpncpy (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  /* The source and destination of the call.  */
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* The exact number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The size of the destination object.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);

  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          scalar_int_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
3936 /* Helper to check the sizes of sequences and the destination of calls
3937 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3938 success (no overflow or invalid sizes), false otherwise. */
3941 check_strncat_sizes (tree exp
, tree objsize
)
3943 tree dest
= CALL_EXPR_ARG (exp
, 0);
3944 tree src
= CALL_EXPR_ARG (exp
, 1);
3945 tree maxread
= CALL_EXPR_ARG (exp
, 2);
3947 /* Try to determine the range of lengths that the source expression
3950 get_range_strlen (src
, lenrange
);
3952 /* Try to verify that the destination is big enough for the shortest
3955 if (!objsize
&& warn_stringop_overflow
)
3957 /* If it hasn't been provided by __strncat_chk, try to determine
3958 the size of the destination object into which the source is
3960 objsize
= compute_objsize (dest
, warn_stringop_overflow
- 1);
3963 /* Add one for the terminating nul. */
3964 tree srclen
= (lenrange
[0]
3965 ? fold_build2 (PLUS_EXPR
, size_type_node
, lenrange
[0],
3969 /* The strncat function copies at most MAXREAD bytes and always appends
3970 the terminating nul so the specified upper bound should never be equal
3971 to (or greater than) the size of the destination. */
3972 if (tree_fits_uhwi_p (maxread
) && tree_fits_uhwi_p (objsize
)
3973 && tree_int_cst_equal (objsize
, maxread
))
3975 location_t loc
= tree_nonartificial_location (exp
);
3976 loc
= expansion_point_location_if_in_system_header (loc
);
3978 warning_at (loc
, OPT_Wstringop_overflow_
,
3979 "%K%qD specified bound %E equals destination size",
3980 exp
, get_callee_fndecl (exp
), maxread
);
3986 || (maxread
&& tree_fits_uhwi_p (maxread
)
3987 && tree_fits_uhwi_p (srclen
)
3988 && tree_int_cst_lt (maxread
, srclen
)))
3991 /* The number of bytes to write is LEN but check_access will also
3992 check SRCLEN if LEN's value isn't known. */
3993 return check_access (exp
, dest
, src
, /*size=*/NULL_TREE
, maxread
, srclen
,
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxread = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  if (slen)
    lenrange[0] = lenrange[1] = slen;
  else
    get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
                 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
                                size_one_node)
                 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxread))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
                  "%K%qD specified bound %E equals destination size",
                  exp, get_callee_fndecl (exp), maxread);

      return NULL_RTX;
    }

  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
          && tree_fits_uhwi_p (srclen)
          && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is SRCLEN.  */
  check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);

  return NULL_RTX;
}
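/* For example, with char d[8] the call strncat (d, s, sizeof d) is
   diagnosed above because the bound equals the destination size and
   leaves no room for the NUL that strncat always appends; the safe
   idiom is strncat (d, s, sizeof d - strlen (d) - 1).  */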
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      if (warn_stringop_overflow)
        {
          tree destsize = compute_objsize (dest,
                                           warn_stringop_overflow - 1);

          /* The number of bytes to write is LEN but check_access will also
             check SLEN if LEN's value isn't known.  */
          check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
                        destsize);
        }

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
        return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align = get_pointer_alignment (dest);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
              || !can_store_by_pieces (tree_to_uhwi (len),
                                       builtin_strncpy_read_str,
                                       CONST_CAST (char *, p),
                                       dest_align, false))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_to_uhwi (len),
                           builtin_strncpy_read_str,
                           CONST_CAST (char *, p), dest_align, false, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), target);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }

  return NULL_RTX;
}
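/* For example, strncpy (d, "ab", 8) must store 'a', 'b' and six trailing
   NUL bytes; when both the string and the length are known at compile
   time the store_by_pieces path above emits exactly those eight bytes.  */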
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         scalar_int_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        scalar_int_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
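/* For example, for a 4-byte mode and DATA holding the byte value 0xab,
   the multiplication by the 0x01010101 coefficient built above yields a
   register containing 0xabababab, i.e. four copies of the byte.  */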
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  check_memop_access (exp, dest, NULL_TREE, len);

  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
}
/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode MODE
   if that's convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_BOUNDS_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
        {
          rtx bnd = force_reg (targetm.chkp_bound_mode (),
                               expand_normal (CALL_EXPR_ARG (exp, 1)));
          res = chkp_join_splitted_slot (res, bnd);
        }
      return res;
    }
}
4219 /* Helper function to do the actual work for expand_builtin_memset. The
4220 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4221 so that this can also be called without constructing an actual CALL_EXPR.
4222 The other arguments and return value are the same as for
4223 expand_builtin_memset. */
4226 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
4227 rtx target
, machine_mode mode
, tree orig_exp
)
4230 enum built_in_function fcode
;
4231 machine_mode val_mode
;
4233 unsigned int dest_align
;
4234 rtx dest_mem
, dest_addr
, len_rtx
;
4235 HOST_WIDE_INT expected_size
= -1;
4236 unsigned int expected_align
= 0;
4237 unsigned HOST_WIDE_INT min_size
;
4238 unsigned HOST_WIDE_INT max_size
;
4239 unsigned HOST_WIDE_INT probable_max_size
;
4241 dest_align
= get_pointer_alignment (dest
);
4243 /* If DEST is not a pointer type, don't do this operation in-line. */
4244 if (dest_align
== 0)
4247 if (currently_expanding_gimple_stmt
)
4248 stringop_block_profile (currently_expanding_gimple_stmt
,
4249 &expected_align
, &expected_size
);
4251 if (expected_align
< dest_align
)
4252 expected_align
= dest_align
;
4254 /* If the LEN parameter is zero, return DEST. */
4255 if (integer_zerop (len
))
4257 /* Evaluate and ignore VAL in case it has side-effects. */
4258 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4259 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
4262 /* Stabilize the arguments in case we fail. */
4263 dest
= builtin_save_expr (dest
);
4264 val
= builtin_save_expr (val
);
4265 len
= builtin_save_expr (len
);
4267 len_rtx
= expand_normal (len
);
4268 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
4269 &probable_max_size
);
4270 dest_mem
= get_memory_rtx (dest
, len
);
4271 val_mode
= TYPE_MODE (unsigned_char_type_node
);
4273 if (TREE_CODE (val
) != INTEGER_CST
)
4277 val_rtx
= expand_normal (val
);
4278 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
4280 /* Assume that we can memset by pieces if we can store
4281 * the coefficients by pieces (in the required modes).
4282 * We can't pass builtin_memset_gen_str as that emits RTL. */
4284 if (tree_fits_uhwi_p (len
)
4285 && can_store_by_pieces (tree_to_uhwi (len
),
4286 builtin_memset_read_str
, &c
, dest_align
,
4289 val_rtx
= force_reg (val_mode
, val_rtx
);
4290 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4291 builtin_memset_gen_str
, val_rtx
, dest_align
,
4294 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
4295 dest_align
, expected_align
,
4296 expected_size
, min_size
, max_size
,
4300 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4301 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4305 if (target_char_cast (val
, &c
))
4310 if (tree_fits_uhwi_p (len
)
4311 && can_store_by_pieces (tree_to_uhwi (len
),
4312 builtin_memset_read_str
, &c
, dest_align
,
4314 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
4315 builtin_memset_read_str
, &c
, dest_align
, true, 0);
4316 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
4317 gen_int_mode (c
, val_mode
),
4318 dest_align
, expected_align
,
4319 expected_size
, min_size
, max_size
,
4323 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4324 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
4328 set_mem_align (dest_mem
, dest_align
);
4329 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
4330 CALL_EXPR_TAILCALL (orig_exp
)
4331 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
4332 expected_align
, expected_size
,
4338 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
4339 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4345 fndecl
= get_callee_fndecl (orig_exp
);
4346 fcode
= DECL_FUNCTION_CODE (fndecl
);
4347 if (fcode
== BUILT_IN_MEMSET
4348 || fcode
== BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
)
4349 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
4351 else if (fcode
== BUILT_IN_BZERO
)
4352 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
4356 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4357 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4358 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  check_memop_access (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert_loc (loc,
                                                       size_type_node, size),
                                     const0_rtx, VOIDmode, exp);
}
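/* For example, bzero (p, n) is expanded here exactly as
   memset (p, 0, (size_t) n) would be, but ORIG_EXP still names bzero so
   that a failed inline expansion falls back to a call to bzero itself
   rather than to memset.  */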
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
               HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
4410 /* Expand expression EXP, which is a call to the memcmp built-in function.
4411 Return NULL_RTX if we failed and the caller should emit a normal call,
4412 otherwise try to get the result in TARGET, if convenient.
4413 RESULT_EQ is true if we can relax the returned value to be either zero
4414 or nonzero, without caring about the sign. */
4417 expand_builtin_memcmp (tree exp
, rtx target
, bool result_eq
)
4419 if (!validate_arglist (exp
,
4420 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4423 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4424 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4425 tree len
= CALL_EXPR_ARG (exp
, 2);
4427 /* Diagnose calls where the specified length exceeds the size of either
4429 if (warn_stringop_overflow
)
4431 tree size
= compute_objsize (arg1
, 0);
4432 if (check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, len
,
4433 /*maxread=*/NULL_TREE
, size
, /*objsize=*/NULL_TREE
))
4435 size
= compute_objsize (arg2
, 0);
4436 check_access (exp
, /*dst=*/NULL_TREE
, /*src=*/NULL_TREE
, len
,
4437 /*maxread=*/NULL_TREE
, size
, /*objsize=*/NULL_TREE
);
4441 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4442 location_t loc
= EXPR_LOCATION (exp
);
4444 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4445 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4447 /* If we don't have POINTER_TYPE, call the function. */
4448 if (arg1_align
== 0 || arg2_align
== 0)
4451 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4452 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4453 rtx len_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
4455 /* Set MEM_SIZE as appropriate. */
4456 if (CONST_INT_P (len_rtx
))
4458 set_mem_size (arg1_rtx
, INTVAL (len_rtx
));
4459 set_mem_size (arg2_rtx
, INTVAL (len_rtx
));
4462 by_pieces_constfn constfn
= NULL
;
4464 const char *src_str
= c_getstr (arg2
);
4465 if (result_eq
&& src_str
== NULL
)
4467 src_str
= c_getstr (arg1
);
4468 if (src_str
!= NULL
)
4469 std::swap (arg1_rtx
, arg2_rtx
);
4472 /* If SRC is a string constant and block move would be done
4473 by pieces, we can avoid loading the string from memory
4474 and only stored the computed constants. */
4476 && CONST_INT_P (len_rtx
)
4477 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1)
4478 constfn
= builtin_memcpy_read_str
;
4480 rtx result
= emit_block_cmp_hints (arg1_rtx
, arg2_rtx
, len_rtx
,
4481 TREE_TYPE (len
), target
,
4483 CONST_CAST (char *, src_str
));
4487 /* Return the value in the proper mode for this function. */
4488 if (GET_MODE (result
) == mode
)
4493 convert_move (target
, result
, 0);
4497 return convert_to_mode (mode
, result
, 0);
4503 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4504 if we failed the caller should emit a normal call, otherwise try to get
4505 the result in TARGET, if convenient. */
4508 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
4510 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4513 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
4514 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4515 if (cmpstr_icode
== CODE_FOR_nothing
&& cmpstrn_icode
== CODE_FOR_nothing
)
4518 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4519 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4521 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4522 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4524 /* If we don't have POINTER_TYPE, call the function. */
4525 if (arg1_align
== 0 || arg2_align
== 0)
4528 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4529 arg1
= builtin_save_expr (arg1
);
4530 arg2
= builtin_save_expr (arg2
);
4532 rtx arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4533 rtx arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4535 rtx result
= NULL_RTX
;
4536 /* Try to call cmpstrsi. */
4537 if (cmpstr_icode
!= CODE_FOR_nothing
)
4538 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
4539 MIN (arg1_align
, arg2_align
));
4541 /* Try to determine at least one length and call cmpstrnsi. */
4542 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
4547 tree len1
= c_strlen (arg1
, 1);
4548 tree len2
= c_strlen (arg2
, 1);
4551 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4553 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4555 /* If we don't have a constant length for the first, use the length
4556 of the second, if we know it. We don't require a constant for
4557 this case; some cost analysis could be done if both are available
4558 but neither is constant. For now, assume they're equally cheap,
4559 unless one has side effects. If both strings have constant lengths,
4566 else if (TREE_SIDE_EFFECTS (len1
))
4568 else if (TREE_SIDE_EFFECTS (len2
))
4570 else if (TREE_CODE (len1
) != INTEGER_CST
)
4572 else if (TREE_CODE (len2
) != INTEGER_CST
)
4574 else if (tree_int_cst_lt (len1
, len2
))
4579 /* If both arguments have side effects, we cannot optimize. */
4580 if (len
&& !TREE_SIDE_EFFECTS (len
))
4582 arg3_rtx
= expand_normal (len
);
4583 result
= expand_cmpstrn_or_cmpmem
4584 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
4585 arg3_rtx
, MIN (arg1_align
, arg2_align
));
4589 tree fndecl
= get_callee_fndecl (exp
);
4592 /* Check to see if the argument was declared attribute nonstring
4593 and if so, issue a warning since at this point it's not known
4594 to be nul-terminated. */
4595 maybe_warn_nonstring_arg (fndecl
, exp
);
4597 /* Return the value in the proper mode for this function. */
4598 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4599 if (GET_MODE (result
) == mode
)
4602 return convert_to_mode (mode
, result
, 0);
4603 convert_move (target
, result
, 0);
4607 /* Expand the library call ourselves using a stabilized argument
4608 list to avoid re-evaluating the function's arguments twice. */
4609 tree fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
4610 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4611 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4612 return expand_call (fn
, target
, target
== const0_rtx
);
4615 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4616 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4617 the result in TARGET, if convenient. */
4620 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
4621 ATTRIBUTE_UNUSED machine_mode mode
)
4623 if (!validate_arglist (exp
,
4624 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4627 /* If c_strlen can determine an expression for one of the string
4628 lengths, and it doesn't have side effects, then emit cmpstrnsi
4629 using length MIN(strlen(string)+1, arg3). */
4630 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
4631 if (cmpstrn_icode
== CODE_FOR_nothing
)
4636 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4637 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4638 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4640 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
4641 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
4643 tree len1
= c_strlen (arg1
, 1);
4644 tree len2
= c_strlen (arg2
, 1);
4646 location_t loc
= EXPR_LOCATION (exp
);
4649 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
4651 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
4653 tree len3
= fold_convert_loc (loc
, sizetype
, arg3
);
4655 /* If we don't have a constant length for the first, use the length
4656 of the second, if we know it. If neither string is constant length,
4657 use the given length argument. We don't require a constant for
4658 this case; some cost analysis could be done if both are available
4659 but neither is constant. For now, assume they're equally cheap,
4660 unless one has side effects. If both strings have constant lengths,
4669 else if (TREE_SIDE_EFFECTS (len1
))
4671 else if (TREE_SIDE_EFFECTS (len2
))
4673 else if (TREE_CODE (len1
) != INTEGER_CST
)
4675 else if (TREE_CODE (len2
) != INTEGER_CST
)
4677 else if (tree_int_cst_lt (len1
, len2
))
4682 /* If we are not using the given length, we must incorporate it here.
4683 The actual new length parameter will be MIN(len,arg3) in this case. */
4685 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
, len3
);
4686 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
4687 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
4688 rtx arg3_rtx
= expand_normal (len
);
4689 rtx result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
4690 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
4691 MIN (arg1_align
, arg2_align
));
4693 tree fndecl
= get_callee_fndecl (exp
);
4696 /* Check to see if the argument was declared attribute nonstring
4697 and if so, issue a warning since at this point it's not known
4698 to be nul-terminated. */
4699 maybe_warn_nonstring_arg (fndecl
, exp
);
4701 /* Return the value in the proper mode for this function. */
4702 mode
= TYPE_MODE (TREE_TYPE (exp
));
4703 if (GET_MODE (result
) == mode
)
4706 return convert_to_mode (mode
, result
, 0);
4707 convert_move (target
, result
, 0);
4711 /* Expand the library call ourselves using a stabilized argument
4712 list to avoid re-evaluating the function's arguments twice. */
4713 tree fn
= build_call_nofold_loc (loc
, fndecl
, 3, arg1
, arg2
, len
);
4714 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
4715 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4716 return expand_call (fn
, target
, target
== const0_rtx
);
4719 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4720 if that's convenient. */
4723 expand_builtin_saveregs (void)
4728 /* Don't do __builtin_saveregs more than once in a function.
4729 Save the result of the first call and reuse it. */
4730 if (saveregs_value
!= 0)
4731 return saveregs_value
;
4733 /* When this function is called, it means that registers must be
4734 saved on entry to this function. So we migrate the call to the
4735 first insn of this function. */
4739 /* Do whatever the machine needs done in this case. */
4740 val
= targetm
.calls
.expand_builtin_saveregs ();
4745 saveregs_value
= val
;
4747 /* Put the insns after the NOTE that starts the function. If this
4748 is inside a start_sequence, make the outer-level insn chain current, so
4749 the code is placed at the start of the function. */
4750 push_topmost_sequence ();
4751 emit_insn_after (seq
, entry_of_function ());
4752 pop_topmost_sequence ();
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4770 /* Make it easier for the backends by protecting the valist argument
4771 from multiple evaluations. */
4774 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4776 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4778 /* The current way of determining the type of valist is completely
4779 bogus. We should have the information on the va builtin instead. */
4781 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4783 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4785 if (TREE_SIDE_EFFECTS (valist
))
4786 valist
= save_expr (valist
);
4788 /* For this case, the backends will be expecting a pointer to
4789 vatype, but it's possible we've actually been given an array
4790 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4792 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4794 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4795 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4800 tree pt
= build_pointer_type (vatype
);
4804 if (! TREE_SIDE_EFFECTS (valist
))
4807 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4808 TREE_SIDE_EFFECTS (valist
) = 1;
4811 if (TREE_SIDE_EFFECTS (valist
))
4812 valist
= save_expr (valist
);
4813 valist
= fold_build2_loc (loc
, MEM_REF
,
4814 vatype
, valist
, build_int_cst (pt
, 0));
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))
        {
          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);
        }
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}
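/* For example, on targets whose va_list is a one-element array of a
   record (such as the x86-64 psABI va_list), an object passed to another
   function decays to a pointer to that record; unwrapping both types
   above lets the decayed pointer still be recognized as a va_list.  */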
4865 /* The "standard" implementation of va_start: just assign `nextarg' to
4869 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4871 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4872 convert_move (va_r
, nextarg
, 0);
4874 /* We do not have any valid bounds for the pointer, so
4875 just store zero bounds for it. */
4876 if (chkp_function_instrumented_p (current_function_decl
))
4877 chkp_expand_bounds_reset_for_mem (valist
,
4878 make_tree (TREE_TYPE (valist
),
4882 /* Expand EXP, a call to __builtin_va_start. */
4885 expand_builtin_va_start (tree exp
)
4889 location_t loc
= EXPR_LOCATION (exp
);
4891 if (call_expr_nargs (exp
) < 2)
4893 error_at (loc
, "too few arguments to function %<va_start%>");
4897 if (fold_builtin_next_arg (exp
, true))
4900 nextarg
= expand_builtin_next_arg ();
4901 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4903 if (targetm
.expand_builtin_va_start
)
4904 targetm
.expand_builtin_va_start (valist
, nextarg
);
4906 std_expand_builtin_va_start (valist
, nextarg
);
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
4926 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4927 builtin rather than just as an assignment in stdarg.h because of the
4928 nastiness of array-type va_list types. */
4931 expand_builtin_va_copy (tree exp
)
4934 location_t loc
= EXPR_LOCATION (exp
);
4936 dst
= CALL_EXPR_ARG (exp
, 0);
4937 src
= CALL_EXPR_ARG (exp
, 1);
4939 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4940 src
= stabilize_va_list_loc (loc
, src
, 0);
4942 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4944 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4946 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4947 TREE_SIDE_EFFECTS (t
) = 1;
4948 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4952 rtx dstb
, srcb
, size
;
4954 /* Evaluate to pointers. */
4955 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4956 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4957 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4958 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4960 dstb
= convert_memory_address (Pmode
, dstb
);
4961 srcb
= convert_memory_address (Pmode
, srcb
);
4963 /* "Dereference" to BLKmode memories. */
4964 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4965 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4966 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4967 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4968 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4969 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4972 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4978 /* Expand a call to one of the builtin functions __builtin_frame_address or
4979 __builtin_return_address. */
4982 expand_builtin_frame_address (tree fndecl
, tree exp
)
4984 /* The argument must be a nonnegative integer constant.
4985 It counts the number of frames to scan up the stack.
4986 The value is either the frame pointer value or the return
4987 address saved in that frame. */
4988 if (call_expr_nargs (exp
) == 0)
4989 /* Warning about missing arg was already issued. */
4991 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4993 error ("invalid argument to %qD", fndecl
);
4998 /* Number of frames to scan up the stack. */
4999 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
5001 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
5003 /* Some ports cannot access arbitrary stack frames. */
5006 warning (0, "unsupported argument to %qD", fndecl
);
5012 /* Warn since no effort is made to ensure that any frame
5013 beyond the current one exists or can be safely reached. */
5014 warning (OPT_Wframe_address
, "calling %qD with "
5015 "a nonzero argument is unsafe", fndecl
);
5018 /* For __builtin_frame_address, return what we've got. */
5019 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5023 && ! CONSTANT_P (tem
))
5024 tem
= copy_addr_to_reg (tem
);
5029 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5030 failed and the caller should emit a normal call. */
5033 expand_builtin_alloca (tree exp
)
5038 tree fndecl
= get_callee_fndecl (exp
);
5039 HOST_WIDE_INT max_size
;
5040 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5041 bool alloca_for_var
= CALL_ALLOCA_FOR_VAR_P (exp
);
5043 = (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5044 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
,
5046 : fcode
== BUILT_IN_ALLOCA_WITH_ALIGN
5047 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5048 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
5053 if ((alloca_for_var
&& !warn_vla_limit
)
5054 || (!alloca_for_var
&& !warn_alloca_limit
))
5056 /* -Walloca-larger-than and -Wvla-larger-than settings override
5057 the more general -Walloc-size-larger-than so unless either of
5058 the former options is specified check the alloca arguments for
5060 tree args
[] = { CALL_EXPR_ARG (exp
, 0), NULL_TREE
};
5061 int idx
[] = { 0, -1 };
5062 maybe_warn_alloc_args_overflow (fndecl
, exp
, args
, idx
);
5065 /* Compute the argument. */
5066 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5068 /* Compute the alignment. */
5069 align
= (fcode
== BUILT_IN_ALLOCA
5071 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1)));
5073 /* Compute the maximum size. */
5074 max_size
= (fcode
== BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5075 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 2))
5078 /* Allocate the desired space. If the allocation stems from the declaration
5079 of a variable-sized object, it cannot accumulate. */
5081 = allocate_dynamic_stack_space (op0
, 0, align
, max_size
, alloca_for_var
);
5082 result
= convert_memory_address (ptr_mode
, result
);
5087 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5088 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5089 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5090 handle_builtin_stack_restore function. */
5093 expand_asan_emit_allocas_unpoison (tree exp
)
5095 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5096 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5097 rtx top
= expand_expr (arg0
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
5098 rtx bot
= expand_expr (arg1
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
5099 rtx off
= expand_simple_binop (Pmode
, MINUS
, virtual_stack_dynamic_rtx
,
5100 stack_pointer_rtx
, NULL_RTX
, 0,
5102 off
= convert_modes (ptr_mode
, Pmode
, off
, 0);
5103 bot
= expand_simple_binop (ptr_mode
, PLUS
, bot
, off
, NULL_RTX
, 0,
5105 rtx ret
= init_one_libfunc ("__asan_allocas_unpoison");
5106 ret
= emit_library_call_value (ret
, NULL_RTX
, LCT_NORMAL
, ptr_mode
,
5107 top
, ptr_mode
, bot
, ptr_mode
);
5111 /* Expand a call to bswap builtin in EXP.
5112 Return NULL_RTX if a normal call should be emitted rather than expanding the
5113 function in-line. If convenient, the result should be placed in TARGET.
5114 SUBTARGET may be used as the target for computing one of EXP's operands. */
5117 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
5123 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5126 arg
= CALL_EXPR_ARG (exp
, 0);
5127 op0
= expand_expr (arg
,
5128 subtarget
&& GET_MODE (subtarget
) == target_mode
5129 ? subtarget
: NULL_RTX
,
5130 target_mode
, EXPAND_NORMAL
);
5131 if (GET_MODE (op0
) != target_mode
)
5132 op0
= convert_to_mode (target_mode
, op0
, 1);
5134 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
5136 gcc_assert (target
);
5138 return convert_to_mode (target_mode
, target
, 1);
5141 /* Expand a call to a unary builtin in EXP.
5142 Return NULL_RTX if a normal call should be emitted rather than expanding the
5143 function in-line. If convenient, the result should be placed in TARGET.
5144 SUBTARGET may be used as the target for computing one of EXP's operands. */
5147 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
5148 rtx subtarget
, optab op_optab
)
5152 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5155 /* Compute the argument. */
5156 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
5158 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
5159 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
5160 VOIDmode
, EXPAND_NORMAL
);
5161 /* Compute op, into TARGET if possible.
5162 Set TARGET to wherever the result comes back. */
5163 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5164 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
5165 gcc_assert (target
);
5167 return convert_to_mode (target_mode
, target
, 0);
5170 /* Expand a call to __builtin_expect. We just return our argument
5171 as the builtin_expect semantic should've been already executed by
5172 tree branch prediction pass. */
5175 expand_builtin_expect (tree exp
, rtx target
)
5179 if (call_expr_nargs (exp
) < 2)
5181 arg
= CALL_EXPR_ARG (exp
, 0);
5183 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5184 /* When guessing was done, the hints should be already stripped away. */
5185 gcc_assert (!flag_guess_branch_prob
5186 || optimize
== 0 || seen_error ());
5190 /* Expand a call to __builtin_assume_aligned. We just return our first
5191 argument as the builtin_assume_aligned semantic should've been already
5195 expand_builtin_assume_aligned (tree exp
, rtx target
)
5197 if (call_expr_nargs (exp
) < 2)
5199 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
5201 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
5202 && (call_expr_nargs (exp
) < 3
5203 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
5208 expand_builtin_trap (void)
5210 if (targetm
.have_trap ())
5212 rtx_insn
*insn
= emit_insn (targetm
.gen_trap ());
5213 /* For trap insns when not accumulating outgoing args force
5214 REG_ARGS_SIZE note to prevent crossjumping of calls with
5215 different args sizes. */
5216 if (!ACCUMULATE_OUTGOING_ARGS
)
5217 add_args_size_note (insn
, stack_pointer_delta
);
5221 tree fn
= builtin_decl_implicit (BUILT_IN_ABORT
);
5222 tree call_expr
= build_call_expr (fn
, 0);
5223 expand_call (call_expr
, NULL_RTX
, false);
5229 /* Expand a call to __builtin_unreachable. We do nothing except emit
5230 a barrier saying that control flow will not pass here.
5232 It is the responsibility of the program being compiled to ensure
5233 that control flow does never reach __builtin_unreachable. */
5235 expand_builtin_unreachable (void)
5240 /* Expand EXP, a call to fabs, fabsf or fabsl.
5241 Return NULL_RTX if a normal call should be emitted rather than expanding
5242 the function inline. If convenient, the result should be placed
5243 in TARGET. SUBTARGET may be used as the target for computing
5247 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5253 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5256 arg
= CALL_EXPR_ARG (exp
, 0);
5257 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
5258 mode
= TYPE_MODE (TREE_TYPE (arg
));
5259 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5260 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
5286 /* Expand a call to __builtin___clear_cache. */
5289 expand_builtin___clear_cache (tree exp
)
5291 if (!targetm
.code_for_clear_cache
)
5293 #ifdef CLEAR_INSN_CACHE
5294 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5295 does something. Just do the default expansion to a call to
5299 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5300 does nothing. There is no need to call it. Do nothing. */
5302 #endif /* CLEAR_INSN_CACHE */
5305 /* We have a "clear_cache" insn, and it will handle everything. */
5307 rtx begin_rtx
, end_rtx
;
5309 /* We must not expand to a library call. If we did, any
5310 fallback library function in libgcc that might contain a call to
5311 __builtin___clear_cache() would recurse infinitely. */
5312 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5314 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5318 if (targetm
.have_clear_cache ())
5320 struct expand_operand ops
[2];
5322 begin
= CALL_EXPR_ARG (exp
, 0);
5323 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5325 end
= CALL_EXPR_ARG (exp
, 1);
5326 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5328 create_address_operand (&ops
[0], begin_rtx
);
5329 create_address_operand (&ops
[1], end_rtx
);
5330 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
5362 expand_builtin_init_trampoline (tree exp
, bool onstack
)
5364 tree t_tramp
, t_func
, t_chain
;
5365 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
5367 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5368 POINTER_TYPE
, VOID_TYPE
))
5371 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5372 t_func
= CALL_EXPR_ARG (exp
, 1);
5373 t_chain
= CALL_EXPR_ARG (exp
, 2);
5375 r_tramp
= expand_normal (t_tramp
);
5376 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5377 MEM_NOTRAP_P (m_tramp
) = 1;
5379 /* If ONSTACK, the TRAMP argument should be the address of a field
5380 within the local function's FRAME decl. Either way, let's see if
5381 we can fill in the MEM_ATTRs for this memory. */
5382 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
5383 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
5385 /* Creator of a heap trampoline is responsible for making sure the
5386 address is aligned to at least STACK_BOUNDARY. Normally malloc
5387 will ensure this anyhow. */
5388 tmp
= round_trampoline_addr (r_tramp
);
5391 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
5392 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
5393 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
5396 /* The FUNC argument should be the address of the nested function.
5397 Extract the actual function decl to pass to the hook. */
5398 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
5399 t_func
= TREE_OPERAND (t_func
, 0);
5400 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
5402 r_chain
= expand_normal (t_chain
);
5404 /* Generate insns to initialize the trampoline. */
5405 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
5409 trampolines_created
= 1;
5411 if (targetm
.calls
.custom_function_descriptors
!= 0)
5412 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
5413 "trampoline generated for nested function %qD", t_func
);
static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}
5435 /* Expand a call to the builtin descriptor initialization routine.
5436 A descriptor is made up of a couple of pointers to the static
5437 chain and the code entry in this order. */
5440 expand_builtin_init_descriptor (tree exp
)
5442 tree t_descr
, t_func
, t_chain
;
5443 rtx m_descr
, r_descr
, r_func
, r_chain
;
5445 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, POINTER_TYPE
,
5449 t_descr
= CALL_EXPR_ARG (exp
, 0);
5450 t_func
= CALL_EXPR_ARG (exp
, 1);
5451 t_chain
= CALL_EXPR_ARG (exp
, 2);
5453 r_descr
= expand_normal (t_descr
);
5454 m_descr
= gen_rtx_MEM (BLKmode
, r_descr
);
5455 MEM_NOTRAP_P (m_descr
) = 1;
5457 r_func
= expand_normal (t_func
);
5458 r_chain
= expand_normal (t_chain
);
5460 /* Generate insns to initialize the descriptor. */
5461 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
, 0), r_chain
);
5462 emit_move_insn (adjust_address_nv (m_descr
, ptr_mode
,
5463 POINTER_SIZE
/ BITS_PER_UNIT
), r_func
);
/* Expand a call to the builtin descriptor adjustment routine.  */

static rtx
expand_builtin_adjust_descriptor (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
                         targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
}
5487 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5488 function. The function first checks whether the back end provides
5489 an insn to implement signbit for the respective mode. If not, it
5490 checks whether the floating point format of the value is such that
5491 the sign bit can be extracted. If that is not the case, error out.
5492 EXP is the expression that is a call to the builtin function; if
5493 convenient, the result should be placed in TARGET. */
5495 expand_builtin_signbit (tree exp
, rtx target
)
5497 const struct real_format
*fmt
;
5498 scalar_float_mode fmode
;
5499 scalar_int_mode rmode
, imode
;
5502 enum insn_code icode
;
5504 location_t loc
= EXPR_LOCATION (exp
);
5506 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5509 arg
= CALL_EXPR_ARG (exp
, 0);
5510 fmode
= SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg
));
5511 rmode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5512 fmt
= REAL_MODE_FORMAT (fmode
);
5514 arg
= builtin_save_expr (arg
);
5516 /* Expand the argument yielding a RTX expression. */
5517 temp
= expand_normal (arg
);
5519 /* Check if the back end provides an insn that handles signbit for the
5521 icode
= optab_handler (signbit_optab
, fmode
);
5522 if (icode
!= CODE_FOR_nothing
)
5524 rtx_insn
*last
= get_last_insn ();
5525 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5526 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
5528 delete_insns_since (last
);
5531 /* For floating point formats without a sign bit, implement signbit
5533 bitpos
= fmt
->signbit_ro
;
5536 /* But we can't do this if the format supports signed zero. */
5537 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
5539 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
5540 build_real (TREE_TYPE (arg
), dconst0
));
5541 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5544 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5546 imode
= int_mode_for_mode (fmode
).require ();
5547 temp
= gen_lowpart (imode
, temp
);
5552 /* Handle targets with different FP word orders. */
5553 if (FLOAT_WORDS_BIG_ENDIAN
)
5554 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5556 word
= bitpos
/ BITS_PER_WORD
;
5557 temp
= operand_subword_force (temp
, word
, fmode
);
5558 bitpos
= bitpos
% BITS_PER_WORD
;
5561 /* Force the intermediate word_mode (or narrower) result into a
5562 register. This avoids attempting to create paradoxical SUBREGs
5563 of floating point modes below. */
5564 temp
= force_reg (imode
, temp
);
5566 /* If the bitpos is within the "result mode" lowpart, the operation
5567 can be implement with a single bitwise AND. Otherwise, we need
5568 a right shift and an AND. */
5570 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5572 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
5574 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
5575 temp
= gen_lowpart (rmode
, temp
);
5576 temp
= expand_binop (rmode
, and_optab
, temp
,
5577 immed_wide_int_const (mask
, rmode
),
5578 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5582 /* Perform a logical right shift to place the signbit in the least
5583 significant bit, then truncate the result to the desired mode
5584 and mask just this bit. */
5585 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
5586 temp
= gen_lowpart (rmode
, temp
);
5587 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5588 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5594 /* Expand fork or exec calls. TARGET is the desired target of the
5595 call. EXP is the call. FN is the
5596 identificator of the actual function. IGNORE is nonzero if the
5597 value is to be ignored. */
5600 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5605 /* If we are not profiling, just call the function. */
5606 if (!profile_arc_flag
)
5609 /* Otherwise call the wrapper. This should be equivalent for the rest of
5610 compiler, so the code does not diverge, and the wrapper may run the
5611 code necessary for keeping the profiling sane. */
5613 switch (DECL_FUNCTION_CODE (fn
))
5616 id
= get_identifier ("__gcov_fork");
5619 case BUILT_IN_EXECL
:
5620 id
= get_identifier ("__gcov_execl");
5623 case BUILT_IN_EXECV
:
5624 id
= get_identifier ("__gcov_execv");
5627 case BUILT_IN_EXECLP
:
5628 id
= get_identifier ("__gcov_execlp");
5631 case BUILT_IN_EXECLE
:
5632 id
= get_identifier ("__gcov_execle");
5635 case BUILT_IN_EXECVP
:
5636 id
= get_identifier ("__gcov_execvp");
5639 case BUILT_IN_EXECVE
:
5640 id
= get_identifier ("__gcov_execve");
5647 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
5648 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5649 DECL_EXTERNAL (decl
) = 1;
5650 TREE_PUBLIC (decl
) = 1;
5651 DECL_ARTIFICIAL (decl
) = 1;
5652 TREE_NOTHROW (decl
) = 1;
5653 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5654 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5655 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
5656 return expand_call (call
, target
, ignore
);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}
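/* For example, a FCODE_DIFF of 0 (the FOO_1 builtin) yields the 8-bit
   integer mode, 2 (FOO_4) the 32-bit mode and 4 (FOO_16) the 128-bit
   mode, since the operand size is BITS_PER_UNIT << FCODE_DIFF bits.  */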
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
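/* For example, a char operand of a __sync builtin may arrive here
   promoted to a full integer register; convert_modes narrows it back to
   the byte-wide MODE of the intrinsic, falling back to the argument's
   tree type when the value is a bare CONST_INT with VOIDmode.  */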
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
                               enum rtx_code code, bool after,
                               rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
        {
        case BUILT_IN_SYNC_FETCH_AND_NAND_1:
        case BUILT_IN_SYNC_FETCH_AND_NAND_2:
        case BUILT_IN_SYNC_FETCH_AND_NAND_4:
        case BUILT_IN_SYNC_FETCH_AND_NAND_8:
        case BUILT_IN_SYNC_FETCH_AND_NAND_16:
          if (warned_f_a_n)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;
          break;

        case BUILT_IN_SYNC_NAND_AND_FETCH_1:
        case BUILT_IN_SYNC_NAND_AND_FETCH_2:
        case BUILT_IN_SYNC_NAND_AND_FETCH_4:
        case BUILT_IN_SYNC_NAND_AND_FETCH_8:
        case BUILT_IN_SYNC_NAND_AND_FETCH_16:
          if (warned_n_a_f)
            break;

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
                                 after);
}
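/* Illustrative example (user-level view, not part of the compiler): the
   AFTER flag distinguishes the two result conventions of the __sync family:

       int v = 40;
       int old_ = __sync_fetch_and_add (&v, 2);   now old_ == 40, v == 42
       int new_ = __sync_add_and_fetch (&v, 2);   now new_ == 44, v == 44

   and CODE == NOT requests the NAND form, i.e. the value stored is
   ~(old & operand), which is what the "changed semantics in GCC 4.4"
   diagnostics above refer to.  */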
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
        pbool = &target;
      else
        poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
                                       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
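/* Illustrative example (not part of the compiler): the classic use of the
   two builtins expanded above is a simple spinlock:

       static volatile int lock;

       void enter (void)
       {
         while (__sync_lock_test_and_set (&lock, 1))
           ;
       }

       void leave (void)
       {
         __sync_lock_release (&lock);
       }

   test_and_set is an acquire-style atomic exchange (expanded via
   expand_sync_lock_test_and_set), and lock_release becomes an atomic store
   of zero with MEMMODEL_SYNC_RELEASE semantics.  */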
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
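/* Illustrative note (not part of the compiler): the constant checked here is
   the value of the __ATOMIC_* macros that user code passes, i.e.

       __ATOMIC_RELAXED == 0, __ATOMIC_CONSUME == 1, __ATOMIC_ACQUIRE == 2,
       __ATOMIC_RELEASE == 3, __ATOMIC_ACQ_REL == 4, __ATOMIC_SEQ_CST == 5

   so a call like

       int memorder = pick_order ();   (pick_order is a hypothetical helper)
       __atomic_store_n (&x, 1, memorder);

   reaches this function with a non-constant EXP and is conservatively
   treated as MEMMODEL_SEQ_CST rather than diagnosed.  */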
/* Expand the __atomic_exchange intrinsic:
        TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
{
  rtx val, mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
}
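/* Illustrative example (not part of the compiler): the source-level form
   that reaches this expander is

       long cur = __atomic_exchange_n (&slot, next, __ATOMIC_ACQ_REL);

   which atomically stores NEXT into SLOT and returns the previous value;
   with -fno-inline-atomics the NULL_RTX return above makes the caller fall
   back to the library call instead.  */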
/* Expand the __atomic_compare_exchange intrinsic:
        bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                        TYPE desired, BOOL weak,
                                        enum memmodel success,
                                        enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
                                        rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
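/* Illustrative example (not part of the compiler): a typical CAS loop that
   this expansion supports is

       int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
       while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                            true,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_RELAXED))
         ;

   where the fourth argument selects the weak form.  On failure the builtin
   writes the observed value back through &expected, which is exactly the
   conditional store emitted above, guarded so that a successful exchange
   does not touch *expected.  */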
6001 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6002 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6003 call. The weak parameter must be dropped to match the expected parameter
6004 list and the expected argument changed from value to pointer to memory
6008 expand_ifn_atomic_compare_exchange_into_call (gcall
*call
, machine_mode mode
)
6011 vec
<tree
, va_gc
> *vec
;
6014 vec
->quick_push (gimple_call_arg (call
, 0));
6015 tree expected
= gimple_call_arg (call
, 1);
6016 rtx x
= assign_stack_temp_for_type (mode
, GET_MODE_SIZE (mode
),
6017 TREE_TYPE (expected
));
6018 rtx expd
= expand_expr (expected
, x
, mode
, EXPAND_NORMAL
);
6020 emit_move_insn (x
, expd
);
6021 tree v
= make_tree (TREE_TYPE (expected
), x
);
6022 vec
->quick_push (build1 (ADDR_EXPR
,
6023 build_pointer_type (TREE_TYPE (expected
)), v
));
6024 vec
->quick_push (gimple_call_arg (call
, 2));
6025 /* Skip the boolean weak parameter. */
6026 for (z
= 4; z
< 6; z
++)
6027 vec
->quick_push (gimple_call_arg (call
, z
));
6028 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6029 unsigned int bytes_log2
= exact_log2 (GET_MODE_SIZE (mode
).to_constant ());
6030 gcc_assert (bytes_log2
< 5);
6031 built_in_function fncode
6032 = (built_in_function
) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6034 tree fndecl
= builtin_decl_explicit (fncode
);
6035 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fndecl
)),
6037 tree exp
= build_call_vec (boolean_type_node
, fn
, vec
);
6038 tree lhs
= gimple_call_lhs (call
);
6039 rtx boolret
= expand_call (exp
, NULL_RTX
, lhs
== NULL_TREE
);
6042 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6043 if (GET_MODE (boolret
) != mode
)
6044 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
6045 x
= force_reg (mode
, x
);
6046 write_complex_part (target
, boolret
, true);
6047 write_complex_part (target
, x
, false);
6051 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6054 expand_ifn_atomic_compare_exchange (gcall
*call
)
6056 int size
= tree_to_shwi (gimple_call_arg (call
, 3)) & 255;
6057 gcc_assert (size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
6058 machine_mode mode
= int_mode_for_size (BITS_PER_UNIT
* size
, 0).require ();
6059 rtx expect
, desired
, mem
, oldval
, boolret
;
6060 enum memmodel success
, failure
;
6064 = expansion_point_location_if_in_system_header (gimple_location (call
));
6066 success
= get_memmodel (gimple_call_arg (call
, 4));
6067 failure
= get_memmodel (gimple_call_arg (call
, 5));
6069 if (failure
> success
)
6071 warning_at (loc
, OPT_Winvalid_memory_model
,
6072 "failure memory model cannot be stronger than success "
6073 "memory model for %<__atomic_compare_exchange%>");
6074 success
= MEMMODEL_SEQ_CST
;
6077 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
6079 warning_at (loc
, OPT_Winvalid_memory_model
,
6080 "invalid failure memory model for "
6081 "%<__atomic_compare_exchange%>");
6082 failure
= MEMMODEL_SEQ_CST
;
6083 success
= MEMMODEL_SEQ_CST
;
6086 if (!flag_inline_atomics
)
6088 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
6092 /* Expand the operands. */
6093 mem
= get_builtin_sync_mem (gimple_call_arg (call
, 0), mode
);
6095 expect
= expand_expr_force_mode (gimple_call_arg (call
, 1), mode
);
6096 desired
= expand_expr_force_mode (gimple_call_arg (call
, 2), mode
);
6098 is_weak
= (tree_to_shwi (gimple_call_arg (call
, 3)) & 256) != 0;
6103 if (!expand_atomic_compare_and_swap (&boolret
, &oldval
, mem
, expect
, desired
,
6104 is_weak
, success
, failure
))
6106 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
6110 lhs
= gimple_call_lhs (call
);
6113 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6114 if (GET_MODE (boolret
) != mode
)
6115 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
6116 write_complex_part (target
, boolret
, true);
6117 write_complex_part (target
, oldval
, false);
/* Expand the __atomic_load intrinsic:
        TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))
    {
      source_location loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
}
/* Expand the __atomic_store intrinsic:
        void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))
    {
      source_location loc
        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
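/* Illustrative example (not part of the compiler): the load/store pair
   expanded by the two functions above implements the usual message-passing
   idiom:

       __atomic_store_n (&payload, v, __ATOMIC_RELAXED);
       __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);

       while (!__atomic_load_n (&flag, __ATOMIC_ACQUIRE))
         ;
       int seen = __atomic_load_n (&payload, __ATOMIC_RELAXED);

   The checks above enforce the C11 rules: a load may not use release or
   acq_rel, and a store may only use relaxed, release or seq_cst.  */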
/* Expand the __atomic_fetch_XXX intrinsic:
        TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
        return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call can not be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
        {
          ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
                                     OPTAB_LIB_WIDEN);
          ret = expand_simple_unop (mode, NOT, ret, target, true);
        }
      else
        ret = expand_simple_binop (mode, code, ret, val, target, true,
                                   OPTAB_LIB_WIDEN);
    }

  return ret;
}
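/* Illustrative example (not part of the compiler): the arithmetic correction
   above exists because the library fallback only provides the fetch-before
   form.  For instance

       unsigned old_ = __atomic_fetch_add (&n, 4, __ATOMIC_SEQ_CST);
       unsigned new_ = __atomic_add_fetch (&n, 4, __ATOMIC_SEQ_CST);

   may both end up calling the same external fetch-and-add routine; for the
   second one the expander then re-applies the operation to the returned
   value, and for the NAND forms it recomputes ~(result & val), matching the
   documented semantics.  */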
6259 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6262 expand_ifn_atomic_bit_test_and (gcall
*call
)
6264 tree ptr
= gimple_call_arg (call
, 0);
6265 tree bit
= gimple_call_arg (call
, 1);
6266 tree flag
= gimple_call_arg (call
, 2);
6267 tree lhs
= gimple_call_lhs (call
);
6268 enum memmodel model
= MEMMODEL_SYNC_SEQ_CST
;
6269 machine_mode mode
= TYPE_MODE (TREE_TYPE (flag
));
6272 struct expand_operand ops
[5];
6274 gcc_assert (flag_inline_atomics
);
6276 if (gimple_call_num_args (call
) == 4)
6277 model
= get_memmodel (gimple_call_arg (call
, 3));
6279 rtx mem
= get_builtin_sync_mem (ptr
, mode
);
6280 rtx val
= expand_expr_force_mode (bit
, mode
);
6282 switch (gimple_call_internal_fn (call
))
6284 case IFN_ATOMIC_BIT_TEST_AND_SET
:
6286 optab
= atomic_bit_test_and_set_optab
;
6288 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT
:
6290 optab
= atomic_bit_test_and_complement_optab
;
6292 case IFN_ATOMIC_BIT_TEST_AND_RESET
:
6294 optab
= atomic_bit_test_and_reset_optab
;
6300 if (lhs
== NULL_TREE
)
6302 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6303 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6305 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6306 expand_atomic_fetch_op (const0_rtx
, mem
, val
, code
, model
, false);
6310 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
6311 enum insn_code icode
= direct_optab_handler (optab
, mode
);
6312 gcc_assert (icode
!= CODE_FOR_nothing
);
6313 create_output_operand (&ops
[0], target
, mode
);
6314 create_fixed_operand (&ops
[1], mem
);
6315 create_convert_operand_to (&ops
[2], val
, mode
, true);
6316 create_integer_operand (&ops
[3], model
);
6317 create_integer_operand (&ops
[4], integer_onep (flag
));
6318 if (maybe_expand_insn (icode
, 5, ops
))
6322 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
6323 val
, NULL_RTX
, true, OPTAB_DIRECT
);
6326 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
6327 rtx result
= expand_atomic_fetch_op (gen_reg_rtx (mode
), mem
, val
,
6328 code
, model
, false);
6329 if (integer_onep (flag
))
6331 result
= expand_simple_binop (mode
, ASHIFTRT
, result
, bitval
,
6332 NULL_RTX
, true, OPTAB_DIRECT
);
6333 result
= expand_simple_binop (mode
, AND
, result
, const1_rtx
, target
,
6334 true, OPTAB_DIRECT
);
6337 result
= expand_simple_binop (mode
, AND
, result
, maskval
, target
, true,
6339 if (result
!= target
)
6340 emit_move_insn (target
, result
);
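/* Illustrative note (not part of the compiler): IFN_ATOMIC_BIT_TEST_AND_*
   is an internal function that earlier gimple passes may substitute for
   source patterns of the form

       int was_set (unsigned *word, int bit)
       {
         return (__atomic_fetch_or (word, 1u << bit, __ATOMIC_SEQ_CST)
                 >> bit) & 1;
       }

   so that targets with a direct bit-test-and-set instruction (selected via
   atomic_bit_test_and_set_optab above) avoid materialising the full old
   value; the shift/AND sequence at the end is the generic fallback.  */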
6343 /* Expand an atomic clear operation.
6344 void _atomic_clear (BOOL *obj, enum memmodel)
6345 EXP is the call expression. */
6348 expand_builtin_atomic_clear (tree exp
)
6352 enum memmodel model
;
6354 mode
= int_mode_for_size (BOOL_TYPE_SIZE
, 0).require ();
6355 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6356 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
6358 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
6361 = expansion_point_location_if_in_system_header (input_location
);
6362 warning_at (loc
, OPT_Winvalid_memory_model
,
6363 "invalid memory model for %<__atomic_store%>");
6364 model
= MEMMODEL_SEQ_CST
;
6367 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6368 Failing that, a store is issued by __atomic_store. The only way this can
6369 fail is if the bool type is larger than a word size. Unlikely, but
6370 handle it anyway for completeness. Assume a single threaded model since
6371 there is no atomic support in this case, and no barriers are required. */
6372 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
6374 emit_move_insn (mem
, const0_rtx
);
/* Expand an atomic test_and_set operation.
        bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

static rtx
expand_builtin_atomic_test_and_set (tree exp, rtx target)
{
  rtx mem;
  enum memmodel model;
  machine_mode mode;

  mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
}
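/* Illustrative example (not part of the compiler): the C11-style flag
   operations expanded by the two functions above are

       static volatile _Bool busy;

       void enter (void)
       {
         while (__atomic_test_and_set (&busy, __ATOMIC_ACQUIRE))
           ;
       }

       void leave (void)
       {
         __atomic_clear (&busy, __ATOMIC_RELEASE);
       }

   test_and_set returns the previous contents as a bool; clear stores zero
   and rejects the consume/acquire/acq_rel models, as checked above.  */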
6397 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6398 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6401 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
6405 unsigned int mode_align
, type_align
;
6407 if (TREE_CODE (arg0
) != INTEGER_CST
)
6410 /* We need a corresponding integer mode for the access to be lock-free. */
6411 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
6412 if (!int_mode_for_size (size
, 0).exists (&mode
))
6413 return boolean_false_node
;
6415 mode_align
= GET_MODE_ALIGNMENT (mode
);
6417 if (TREE_CODE (arg1
) == INTEGER_CST
)
6419 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
6421 /* Either this argument is null, or it's a fake pointer encoding
6422 the alignment of the object. */
6423 val
= least_bit_hwi (val
);
6424 val
*= BITS_PER_UNIT
;
6426 if (val
== 0 || mode_align
< val
)
6427 type_align
= mode_align
;
6433 tree ttype
= TREE_TYPE (arg1
);
6435 /* This function is usually invoked and folded immediately by the front
6436 end before anything else has a chance to look at it. The pointer
6437 parameter at this point is usually cast to a void *, so check for that
6438 and look past the cast. */
6439 if (CONVERT_EXPR_P (arg1
)
6440 && POINTER_TYPE_P (ttype
)
6441 && VOID_TYPE_P (TREE_TYPE (ttype
))
6442 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
6443 arg1
= TREE_OPERAND (arg1
, 0);
6445 ttype
= TREE_TYPE (arg1
);
6446 gcc_assert (POINTER_TYPE_P (ttype
));
6448 /* Get the underlying type of the object. */
6449 ttype
= TREE_TYPE (ttype
);
6450 type_align
= TYPE_ALIGN (ttype
);
  /* If the object has smaller alignment, the lock free routines cannot
     be used for it.  */
  if (type_align < mode_align)
    return boolean_false_node;
6458 /* Check if a compare_and_swap pattern exists for the mode which represents
6459 the required size. The pattern is not allowed to fail, so the existence
6460 of the pattern indicates support is present. Also require that an
6461 atomic load exists for the required size. */
6462 if (can_compare_and_swap_p (mode
, true) && can_atomic_load_p (mode
))
6463 return boolean_true_node
;
6465 return boolean_false_node
;
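/* Illustrative note (not part of the compiler): this folding is what lets

       _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                       "int must be lock-free");

   be evaluated at compile time: a null second argument means "assume
   typical alignment for the size", while a non-null constant is treated as
   a fake pointer whose low bits encode the actual alignment.  */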
6468 /* Return true if the parameters to call EXP represent an object which will
6469 always generate lock free instructions. The first argument represents the
6470 size of the object, and the second parameter is a pointer to the object
6471 itself. If NULL is passed for the object, then the result is based on
6472 typical alignment for an object of the specified size. Otherwise return
6476 expand_builtin_atomic_always_lock_free (tree exp
)
6479 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6480 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6482 if (TREE_CODE (arg0
) != INTEGER_CST
)
6484 error ("non-constant argument 1 to __atomic_always_lock_free");
6488 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
6489 if (size
== boolean_true_node
)
6494 /* Return a one or zero if it can be determined that object ARG1 of size ARG
6495 is lock free on this architecture. */
6498 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
6500 if (!flag_inline_atomics
)
6503 /* If it isn't always lock free, don't generate a result. */
6504 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
6505 return boolean_true_node
;
6510 /* Return true if the parameters to call EXP represent an object which will
6511 always generate lock free instructions. The first argument represents the
6512 size of the object, and the second parameter is a pointer to the object
6513 itself. If NULL is passed for the object, then the result is based on
6514 typical alignment for an object of the specified size. Otherwise return
6518 expand_builtin_atomic_is_lock_free (tree exp
)
6521 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6522 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6524 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
6526 error ("non-integer argument 1 to __atomic_is_lock_free");
6530 if (!flag_inline_atomics
)
6533 /* If the value is known at compile time, return the RTX for it. */
6534 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
6535 if (size
== boolean_true_node
)
/* Expand the __atomic_thread_fence intrinsic:
        void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
        void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
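/* Illustrative example (not part of the compiler): the difference between
   the fences expanded above is their scope:

       __atomic_thread_fence (__ATOMIC_RELEASE);   inter-thread ordering
       __atomic_signal_fence (__ATOMIC_SEQ_CST);   compiler-only barrier,
                                                   ordering code against a
                                                   signal handler on the
                                                   same thread

   __sync_synchronize () is the legacy spelling of a full sequentially
   consistent thread fence, hence the fixed MEMMODEL_SYNC_SEQ_CST above.  */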
6572 expand_builtin_thread_pointer (tree exp
, rtx target
)
6574 enum insn_code icode
;
6575 if (!validate_arglist (exp
, VOID_TYPE
))
6577 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
6578 if (icode
!= CODE_FOR_nothing
)
6580 struct expand_operand op
;
      /* If the target is not suitable then create a new target.  */
6582 if (target
== NULL_RTX
6584 || GET_MODE (target
) != Pmode
)
6585 target
= gen_reg_rtx (Pmode
);
6586 create_output_operand (&op
, target
, Pmode
);
6587 expand_insn (icode
, 1, &op
);
6590 error ("__builtin_thread_pointer is not supported on this target");
6595 expand_builtin_set_thread_pointer (tree exp
)
6597 enum insn_code icode
;
6598 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6600 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
6601 if (icode
!= CODE_FOR_nothing
)
6603 struct expand_operand op
;
6604 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
6605 Pmode
, EXPAND_NORMAL
);
6606 create_input_operand (&op
, val
, Pmode
);
6607 expand_insn (icode
, 1, &op
);
6610 error ("__builtin_set_thread_pointer is not supported on this target");
6614 /* Emit code to restore the current value of stack. */
6617 expand_stack_restore (tree var
)
6620 rtx sa
= expand_normal (var
);
6622 sa
= convert_memory_address (Pmode
, sa
);
6624 prev
= get_last_insn ();
6625 emit_stack_restore (SAVE_BLOCK
, sa
);
6627 record_new_stack_level ();
6629 fixup_args_size_notes (prev
, get_last_insn (), 0);
6632 /* Emit code to save the current value of stack. */
6635 expand_stack_save (void)
6639 emit_stack_save (SAVE_BLOCK
, &ret
);
6643 /* Emit code to get the openacc gang, worker or vector id or size. */
6646 expand_builtin_goacc_parlevel_id_size (tree exp
, rtx target
, int ignore
)
6649 rtx fallback_retval
;
6650 rtx_insn
*(*gen_fn
) (rtx
, rtx
);
6651 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp
)))
6653 case BUILT_IN_GOACC_PARLEVEL_ID
:
6654 name
= "__builtin_goacc_parlevel_id";
6655 fallback_retval
= const0_rtx
;
6656 gen_fn
= targetm
.gen_oacc_dim_pos
;
6658 case BUILT_IN_GOACC_PARLEVEL_SIZE
:
6659 name
= "__builtin_goacc_parlevel_size";
6660 fallback_retval
= const1_rtx
;
6661 gen_fn
= targetm
.gen_oacc_dim_size
;
6667 if (oacc_get_fn_attrib (current_function_decl
) == NULL_TREE
)
6669 error ("%qs only supported in OpenACC code", name
);
6673 tree arg
= CALL_EXPR_ARG (exp
, 0);
6674 if (TREE_CODE (arg
) != INTEGER_CST
)
6676 error ("non-constant argument 0 to %qs", name
);
6680 int dim
= TREE_INT_CST_LOW (arg
);
6684 case GOMP_DIM_WORKER
:
6685 case GOMP_DIM_VECTOR
:
6688 error ("illegal argument 0 to %qs", name
);
6695 if (target
== NULL_RTX
)
6696 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
6698 if (!targetm
.have_oacc_dim_size ())
6700 emit_move_insn (target
, fallback_retval
);
6704 rtx reg
= MEM_P (target
) ? gen_reg_rtx (GET_MODE (target
)) : target
;
6705 emit_insn (gen_fn (reg
, GEN_INT (dim
)));
6707 emit_move_insn (target
, reg
);
6712 /* Expand an expression EXP that calls a built-in function,
6713 with result going to TARGET if that's convenient
6714 (and in mode MODE if that's convenient).
6715 SUBTARGET may be used as the target for computing one of EXP's operands.
6716 IGNORE is nonzero if the value is to be ignored. */
6719 expand_builtin (tree exp
, rtx target
, rtx subtarget
, machine_mode mode
,
6722 tree fndecl
= get_callee_fndecl (exp
);
6723 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6724 machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
6727 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6728 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
6730 /* When ASan is enabled, we don't want to expand some memory/string
6731 builtins and rely on libsanitizer's hooks. This allows us to avoid
6732 redundant checks and be sure, that possible overflow will be detected
6735 if ((flag_sanitize
& SANITIZE_ADDRESS
) && asan_intercepted_p (fcode
))
6736 return expand_call (exp
, target
, ignore
);
6738 /* When not optimizing, generate calls to library functions for a certain
6741 && !called_as_built_in (fndecl
)
6742 && fcode
!= BUILT_IN_FORK
6743 && fcode
!= BUILT_IN_EXECL
6744 && fcode
!= BUILT_IN_EXECV
6745 && fcode
!= BUILT_IN_EXECLP
6746 && fcode
!= BUILT_IN_EXECLE
6747 && fcode
!= BUILT_IN_EXECVP
6748 && fcode
!= BUILT_IN_EXECVE
6749 && !ALLOCA_FUNCTION_CODE_P (fcode
)
6750 && fcode
!= BUILT_IN_FREE
6751 && fcode
!= BUILT_IN_CHKP_SET_PTR_BOUNDS
6752 && fcode
!= BUILT_IN_CHKP_INIT_PTR_BOUNDS
6753 && fcode
!= BUILT_IN_CHKP_NULL_PTR_BOUNDS
6754 && fcode
!= BUILT_IN_CHKP_COPY_PTR_BOUNDS
6755 && fcode
!= BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6756 && fcode
!= BUILT_IN_CHKP_STORE_PTR_BOUNDS
6757 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6758 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6759 && fcode
!= BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6760 && fcode
!= BUILT_IN_CHKP_GET_PTR_LBOUND
6761 && fcode
!= BUILT_IN_CHKP_GET_PTR_UBOUND
6762 && fcode
!= BUILT_IN_CHKP_BNDRET
)
6763 return expand_call (exp
, target
, ignore
);
6765 /* The built-in function expanders test for target == const0_rtx
6766 to determine whether the function's result will be ignored. */
6768 target
= const0_rtx
;
6770 /* If the result of a pure or const built-in function is ignored, and
6771 none of its arguments are volatile, we can avoid expanding the
6772 built-in call and just evaluate the arguments for side-effects. */
6773 if (target
== const0_rtx
6774 && ((flags
= flags_from_decl_or_type (fndecl
)) & (ECF_CONST
| ECF_PURE
))
6775 && !(flags
& ECF_LOOPING_CONST_OR_PURE
))
6777 bool volatilep
= false;
6779 call_expr_arg_iterator iter
;
6781 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6782 if (TREE_THIS_VOLATILE (arg
))
6790 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6791 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
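      /* Illustrative example (not part of the compiler): the loop above
         means that a call whose value is unused, such as

             (void) __builtin_popcount (i++);

         still evaluates its argument (I is incremented) but expands no
         popcount code, because the builtin is const and TARGET is
         const0_rtx here.  */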
6796 /* expand_builtin_with_bounds is supposed to be used for
6797 instrumented builtin calls. */
6798 gcc_assert (!CALL_WITH_BOUNDS_P (exp
));
6802 CASE_FLT_FN (BUILT_IN_FABS
):
6803 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
6804 case BUILT_IN_FABSD32
:
6805 case BUILT_IN_FABSD64
:
6806 case BUILT_IN_FABSD128
:
6807 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6812 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6813 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
6814 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6819 /* Just do a normal library call if we were unable to fold
6821 CASE_FLT_FN (BUILT_IN_CABS
):
6824 CASE_FLT_FN (BUILT_IN_FMA
):
6825 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA
):
6826 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
6831 CASE_FLT_FN (BUILT_IN_ILOGB
):
6832 if (! flag_unsafe_math_optimizations
)
6835 CASE_FLT_FN (BUILT_IN_ISINF
):
6836 CASE_FLT_FN (BUILT_IN_FINITE
):
6837 case BUILT_IN_ISFINITE
:
6838 case BUILT_IN_ISNORMAL
:
6839 target
= expand_builtin_interclass_mathfn (exp
, target
);
6844 CASE_FLT_FN (BUILT_IN_ICEIL
):
6845 CASE_FLT_FN (BUILT_IN_LCEIL
):
6846 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6847 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6848 CASE_FLT_FN (BUILT_IN_IFLOOR
):
6849 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6850 target
= expand_builtin_int_roundingfn (exp
, target
);
6855 CASE_FLT_FN (BUILT_IN_IRINT
):
6856 CASE_FLT_FN (BUILT_IN_LRINT
):
6857 CASE_FLT_FN (BUILT_IN_LLRINT
):
6858 CASE_FLT_FN (BUILT_IN_IROUND
):
6859 CASE_FLT_FN (BUILT_IN_LROUND
):
6860 CASE_FLT_FN (BUILT_IN_LLROUND
):
6861 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
6866 CASE_FLT_FN (BUILT_IN_POWI
):
6867 target
= expand_builtin_powi (exp
, target
);
6872 CASE_FLT_FN (BUILT_IN_CEXPI
):
6873 target
= expand_builtin_cexpi (exp
, target
);
6874 gcc_assert (target
);
6877 CASE_FLT_FN (BUILT_IN_SIN
):
6878 CASE_FLT_FN (BUILT_IN_COS
):
6879 if (! flag_unsafe_math_optimizations
)
6881 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6886 CASE_FLT_FN (BUILT_IN_SINCOS
):
6887 if (! flag_unsafe_math_optimizations
)
6889 target
= expand_builtin_sincos (exp
);
6894 case BUILT_IN_APPLY_ARGS
:
6895 return expand_builtin_apply_args ();
6897 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6898 FUNCTION with a copy of the parameters described by
6899 ARGUMENTS, and ARGSIZE. It returns a block of memory
6900 allocated on the stack into which is stored all the registers
6901 that might possibly be used for returning the result of a
6902 function. ARGUMENTS is the value returned by
6903 __builtin_apply_args. ARGSIZE is the number of bytes of
6904 arguments that must be copied. ??? How should this value be
6905 computed? We'll also need a safe worst case value for varargs
6907 case BUILT_IN_APPLY
:
6908 if (!validate_arglist (exp
, POINTER_TYPE
,
6909 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6910 && !validate_arglist (exp
, REFERENCE_TYPE
,
6911 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6917 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6918 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6919 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6921 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6924 /* __builtin_return (RESULT) causes the function to return the
6925 value described by RESULT. RESULT is address of the block of
6926 memory returned by __builtin_apply. */
6927 case BUILT_IN_RETURN
:
6928 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6929 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6932 case BUILT_IN_SAVEREGS
:
6933 return expand_builtin_saveregs ();
6935 case BUILT_IN_VA_ARG_PACK
:
6936 /* All valid uses of __builtin_va_arg_pack () are removed during
6938 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6941 case BUILT_IN_VA_ARG_PACK_LEN
:
6942 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6944 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6947 /* Return the address of the first anonymous stack arg. */
6948 case BUILT_IN_NEXT_ARG
:
6949 if (fold_builtin_next_arg (exp
, false))
6951 return expand_builtin_next_arg ();
6953 case BUILT_IN_CLEAR_CACHE
:
6954 target
= expand_builtin___clear_cache (exp
);
6959 case BUILT_IN_CLASSIFY_TYPE
:
6960 return expand_builtin_classify_type (exp
);
6962 case BUILT_IN_CONSTANT_P
:
6965 case BUILT_IN_FRAME_ADDRESS
:
6966 case BUILT_IN_RETURN_ADDRESS
:
6967 return expand_builtin_frame_address (fndecl
, exp
);
6969 /* Returns the address of the area where the structure is returned.
6971 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6972 if (call_expr_nargs (exp
) != 0
6973 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6974 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6977 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6979 CASE_BUILT_IN_ALLOCA
:
6980 target
= expand_builtin_alloca (exp
);
6985 case BUILT_IN_ASAN_ALLOCAS_UNPOISON
:
6986 return expand_asan_emit_allocas_unpoison (exp
);
6988 case BUILT_IN_STACK_SAVE
:
6989 return expand_stack_save ();
6991 case BUILT_IN_STACK_RESTORE
:
6992 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6995 case BUILT_IN_BSWAP16
:
6996 case BUILT_IN_BSWAP32
:
6997 case BUILT_IN_BSWAP64
:
6998 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
7003 CASE_INT_FN (BUILT_IN_FFS
):
7004 target
= expand_builtin_unop (target_mode
, exp
, target
,
7005 subtarget
, ffs_optab
);
7010 CASE_INT_FN (BUILT_IN_CLZ
):
7011 target
= expand_builtin_unop (target_mode
, exp
, target
,
7012 subtarget
, clz_optab
);
7017 CASE_INT_FN (BUILT_IN_CTZ
):
7018 target
= expand_builtin_unop (target_mode
, exp
, target
,
7019 subtarget
, ctz_optab
);
7024 CASE_INT_FN (BUILT_IN_CLRSB
):
7025 target
= expand_builtin_unop (target_mode
, exp
, target
,
7026 subtarget
, clrsb_optab
);
7031 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7032 target
= expand_builtin_unop (target_mode
, exp
, target
,
7033 subtarget
, popcount_optab
);
7038 CASE_INT_FN (BUILT_IN_PARITY
):
7039 target
= expand_builtin_unop (target_mode
, exp
, target
,
7040 subtarget
, parity_optab
);
7045 case BUILT_IN_STRLEN
:
7046 target
= expand_builtin_strlen (exp
, target
, target_mode
);
7051 case BUILT_IN_STRCAT
:
7052 target
= expand_builtin_strcat (exp
, target
);
7057 case BUILT_IN_STRCPY
:
7058 target
= expand_builtin_strcpy (exp
, target
);
7063 case BUILT_IN_STRNCAT
:
7064 target
= expand_builtin_strncat (exp
, target
);
7069 case BUILT_IN_STRNCPY
:
7070 target
= expand_builtin_strncpy (exp
, target
);
7075 case BUILT_IN_STPCPY
:
7076 target
= expand_builtin_stpcpy (exp
, target
, mode
);
7081 case BUILT_IN_STPNCPY
:
7082 target
= expand_builtin_stpncpy (exp
, target
);
7087 case BUILT_IN_MEMCHR
:
7088 target
= expand_builtin_memchr (exp
, target
);
7093 case BUILT_IN_MEMCPY
:
7094 target
= expand_builtin_memcpy (exp
, target
);
7099 case BUILT_IN_MEMMOVE
:
7100 target
= expand_builtin_memmove (exp
, target
);
7105 case BUILT_IN_MEMPCPY
:
7106 target
= expand_builtin_mempcpy (exp
, target
);
7111 case BUILT_IN_MEMSET
:
7112 target
= expand_builtin_memset (exp
, target
, mode
);
7117 case BUILT_IN_BZERO
:
7118 target
= expand_builtin_bzero (exp
);
7123 case BUILT_IN_STRCMP
:
7124 target
= expand_builtin_strcmp (exp
, target
);
7129 case BUILT_IN_STRNCMP
:
7130 target
= expand_builtin_strncmp (exp
, target
, mode
);
7136 case BUILT_IN_MEMCMP
:
7137 case BUILT_IN_MEMCMP_EQ
:
7138 target
= expand_builtin_memcmp (exp
, target
, fcode
== BUILT_IN_MEMCMP_EQ
);
7141 if (fcode
== BUILT_IN_MEMCMP_EQ
)
7143 tree newdecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
7144 TREE_OPERAND (exp
, 1) = build_fold_addr_expr (newdecl
);
7148 case BUILT_IN_SETJMP
:
7149 /* This should have been lowered to the builtins below. */
7152 case BUILT_IN_SETJMP_SETUP
:
7153 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7154 and the receiver label. */
7155 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
7157 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
7158 VOIDmode
, EXPAND_NORMAL
);
7159 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
7160 rtx_insn
*label_r
= label_rtx (label
);
7162 /* This is copied from the handling of non-local gotos. */
7163 expand_builtin_setjmp_setup (buf_addr
, label_r
);
7164 nonlocal_goto_handler_labels
7165 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
7166 nonlocal_goto_handler_labels
);
7167 /* ??? Do not let expand_label treat us as such since we would
7168 not want to be both on the list of non-local labels and on
7169 the list of forced labels. */
7170 FORCED_LABEL (label
) = 0;
7175 case BUILT_IN_SETJMP_RECEIVER
:
7176 /* __builtin_setjmp_receiver is passed the receiver label. */
7177 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7179 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
7180 rtx_insn
*label_r
= label_rtx (label
);
7182 expand_builtin_setjmp_receiver (label_r
);
7187 /* __builtin_longjmp is passed a pointer to an array of five words.
7188 It's similar to the C library longjmp function but works with
7189 __builtin_setjmp above. */
7190 case BUILT_IN_LONGJMP
:
7191 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
7193 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
7194 VOIDmode
, EXPAND_NORMAL
);
7195 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
7197 if (value
!= const1_rtx
)
7199 error ("%<__builtin_longjmp%> second argument must be 1");
7203 expand_builtin_longjmp (buf_addr
, value
);
7208 case BUILT_IN_NONLOCAL_GOTO
:
7209 target
= expand_builtin_nonlocal_goto (exp
);
7214 /* This updates the setjmp buffer that is its argument with the value
7215 of the current stack pointer. */
7216 case BUILT_IN_UPDATE_SETJMP_BUF
:
7217 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
7220 = expand_normal (CALL_EXPR_ARG (exp
, 0));
7222 expand_builtin_update_setjmp_buf (buf_addr
);
7228 expand_builtin_trap ();
7231 case BUILT_IN_UNREACHABLE
:
7232 expand_builtin_unreachable ();
7235 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
7236 case BUILT_IN_SIGNBITD32
:
7237 case BUILT_IN_SIGNBITD64
:
7238 case BUILT_IN_SIGNBITD128
:
7239 target
= expand_builtin_signbit (exp
, target
);
7244 /* Various hooks for the DWARF 2 __throw routine. */
7245 case BUILT_IN_UNWIND_INIT
:
7246 expand_builtin_unwind_init ();
7248 case BUILT_IN_DWARF_CFA
:
7249 return virtual_cfa_rtx
;
7250 #ifdef DWARF2_UNWIND_INFO
7251 case BUILT_IN_DWARF_SP_COLUMN
:
7252 return expand_builtin_dwarf_sp_column ();
7253 case BUILT_IN_INIT_DWARF_REG_SIZES
:
7254 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
7257 case BUILT_IN_FROB_RETURN_ADDR
:
7258 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
7259 case BUILT_IN_EXTRACT_RETURN_ADDR
:
7260 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
7261 case BUILT_IN_EH_RETURN
:
7262 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
7263 CALL_EXPR_ARG (exp
, 1));
7265 case BUILT_IN_EH_RETURN_DATA_REGNO
:
7266 return expand_builtin_eh_return_data_regno (exp
);
7267 case BUILT_IN_EXTEND_POINTER
:
7268 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
7269 case BUILT_IN_EH_POINTER
:
7270 return expand_builtin_eh_pointer (exp
);
7271 case BUILT_IN_EH_FILTER
:
7272 return expand_builtin_eh_filter (exp
);
7273 case BUILT_IN_EH_COPY_VALUES
:
7274 return expand_builtin_eh_copy_values (exp
);
7276 case BUILT_IN_VA_START
:
7277 return expand_builtin_va_start (exp
);
7278 case BUILT_IN_VA_END
:
7279 return expand_builtin_va_end (exp
);
7280 case BUILT_IN_VA_COPY
:
7281 return expand_builtin_va_copy (exp
);
7282 case BUILT_IN_EXPECT
:
7283 return expand_builtin_expect (exp
, target
);
7284 case BUILT_IN_ASSUME_ALIGNED
:
7285 return expand_builtin_assume_aligned (exp
, target
);
7286 case BUILT_IN_PREFETCH
:
7287 expand_builtin_prefetch (exp
);
7290 case BUILT_IN_INIT_TRAMPOLINE
:
7291 return expand_builtin_init_trampoline (exp
, true);
7292 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
7293 return expand_builtin_init_trampoline (exp
, false);
7294 case BUILT_IN_ADJUST_TRAMPOLINE
:
7295 return expand_builtin_adjust_trampoline (exp
);
7297 case BUILT_IN_INIT_DESCRIPTOR
:
7298 return expand_builtin_init_descriptor (exp
);
7299 case BUILT_IN_ADJUST_DESCRIPTOR
:
7300 return expand_builtin_adjust_descriptor (exp
);
7303 case BUILT_IN_EXECL
:
7304 case BUILT_IN_EXECV
:
7305 case BUILT_IN_EXECLP
:
7306 case BUILT_IN_EXECLE
:
7307 case BUILT_IN_EXECVP
:
7308 case BUILT_IN_EXECVE
:
7309 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
7314 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
7315 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
7316 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
7317 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
7318 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
7319 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
7320 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
7325 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
7326 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
7327 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
7328 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
7329 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
7330 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
7331 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
7336 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
7337 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
7338 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
7339 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
7340 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
7341 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
7342 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
7347 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
7348 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
7349 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
7350 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
7351 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
7352 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
7353 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
7358 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
7359 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
7360 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
7361 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
7362 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
7363 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
7364 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
7369 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
7370 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
7371 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
7372 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
7373 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
7374 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
7375 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
7380 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
7381 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
7382 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
7383 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
7384 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
7385 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
7386 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
7391 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
7392 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
7393 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
7394 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
7395 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
7396 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
7397 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
7402 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
7403 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
7404 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
7405 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
7406 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
7407 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
7408 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
7413 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
7414 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
7415 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
7416 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
7417 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
7418 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
7419 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
7424 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
7425 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
7426 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
7427 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
7428 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
7429 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
7430 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
7435 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
7436 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
7437 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
7438 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
7439 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
7440 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
7441 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
7446 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
7447 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
7448 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
7449 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
7450 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
7451 if (mode
== VOIDmode
)
7452 mode
= TYPE_MODE (boolean_type_node
);
7453 if (!target
|| !register_operand (target
, mode
))
7454 target
= gen_reg_rtx (mode
);
7456 mode
= get_builtin_sync_mode
7457 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
7458 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
7463 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
7464 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
7465 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
7466 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
7467 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
7468 mode
= get_builtin_sync_mode
7469 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
7470 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
7475 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
7476 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
7477 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
7478 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
7479 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
7480 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
7481 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
7486 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
7487 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
7488 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
7489 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
7490 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
7491 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
7492 expand_builtin_sync_lock_release (mode
, exp
);
7495 case BUILT_IN_SYNC_SYNCHRONIZE
:
7496 expand_builtin_sync_synchronize ();
7499 case BUILT_IN_ATOMIC_EXCHANGE_1
:
7500 case BUILT_IN_ATOMIC_EXCHANGE_2
:
7501 case BUILT_IN_ATOMIC_EXCHANGE_4
:
7502 case BUILT_IN_ATOMIC_EXCHANGE_8
:
7503 case BUILT_IN_ATOMIC_EXCHANGE_16
:
7504 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
7505 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
7510 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
7511 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
7512 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
7513 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
7514 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
7516 unsigned int nargs
, z
;
7517 vec
<tree
, va_gc
> *vec
;
7520 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
7521 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
7525 /* If this is turned into an external library call, the weak parameter
7526 must be dropped to match the expected parameter list. */
7527 nargs
= call_expr_nargs (exp
);
7528 vec_alloc (vec
, nargs
- 1);
7529 for (z
= 0; z
< 3; z
++)
7530 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
7531 /* Skip the boolean weak parameter. */
7532 for (z
= 4; z
< 6; z
++)
7533 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
7534 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
7538 case BUILT_IN_ATOMIC_LOAD_1
:
7539 case BUILT_IN_ATOMIC_LOAD_2
:
7540 case BUILT_IN_ATOMIC_LOAD_4
:
7541 case BUILT_IN_ATOMIC_LOAD_8
:
7542 case BUILT_IN_ATOMIC_LOAD_16
:
7543 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
7544 target
= expand_builtin_atomic_load (mode
, exp
, target
);
7549 case BUILT_IN_ATOMIC_STORE_1
:
7550 case BUILT_IN_ATOMIC_STORE_2
:
7551 case BUILT_IN_ATOMIC_STORE_4
:
7552 case BUILT_IN_ATOMIC_STORE_8
:
7553 case BUILT_IN_ATOMIC_STORE_16
:
7554 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
7555 target
= expand_builtin_atomic_store (mode
, exp
);
7560 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
7561 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
7562 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
7563 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
7564 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
7566 enum built_in_function lib
;
7567 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
7568 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
7569 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
7570 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
7576 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
7577 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
7578 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
7579 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
7580 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
7582 enum built_in_function lib
;
7583 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
7584 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
7585 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
7586 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
7592 case BUILT_IN_ATOMIC_AND_FETCH_1
:
7593 case BUILT_IN_ATOMIC_AND_FETCH_2
:
7594 case BUILT_IN_ATOMIC_AND_FETCH_4
:
7595 case BUILT_IN_ATOMIC_AND_FETCH_8
:
7596 case BUILT_IN_ATOMIC_AND_FETCH_16
:
7598 enum built_in_function lib
;
7599 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
7600 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
7601 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
7602 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
7608 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
7609 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
7610 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
7611 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
7612 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
7614 enum built_in_function lib
;
7615 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
7616 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
7617 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
7618 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
7624 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
7625 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
7626 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
7627 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
7628 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
7630 enum built_in_function lib
;
7631 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
7632 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
7633 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
7634 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
7640 case BUILT_IN_ATOMIC_OR_FETCH_1
:
7641 case BUILT_IN_ATOMIC_OR_FETCH_2
:
7642 case BUILT_IN_ATOMIC_OR_FETCH_4
:
7643 case BUILT_IN_ATOMIC_OR_FETCH_8
:
7644 case BUILT_IN_ATOMIC_OR_FETCH_16
:
7646 enum built_in_function lib
;
7647 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
7648 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
7649 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
7650 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
7656 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
7657 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
7658 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
7659 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
7660 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
7661 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
7662 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
7663 ignore
, BUILT_IN_NONE
);
7668 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
7669 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
7670 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
7671 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
7672 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
7673 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
7674 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
7675 ignore
, BUILT_IN_NONE
);
7680 case BUILT_IN_ATOMIC_FETCH_AND_1
:
7681 case BUILT_IN_ATOMIC_FETCH_AND_2
:
7682 case BUILT_IN_ATOMIC_FETCH_AND_4
:
7683 case BUILT_IN_ATOMIC_FETCH_AND_8
:
7684 case BUILT_IN_ATOMIC_FETCH_AND_16
:
7685 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
7686 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
7687 ignore
, BUILT_IN_NONE
);
7692 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
7693 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
7694 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
7695 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
7696 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
7697 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
7698 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
7699 ignore
, BUILT_IN_NONE
);
7704 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
7705 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
7706 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
7707 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
7708 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
7709 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
7710 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
7711 ignore
, BUILT_IN_NONE
);
7716 case BUILT_IN_ATOMIC_FETCH_OR_1
:
7717 case BUILT_IN_ATOMIC_FETCH_OR_2
:
7718 case BUILT_IN_ATOMIC_FETCH_OR_4
:
7719 case BUILT_IN_ATOMIC_FETCH_OR_8
:
7720 case BUILT_IN_ATOMIC_FETCH_OR_16
:
7721 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
7722 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
7723 ignore
, BUILT_IN_NONE
);
    case BUILT_IN_ATOMIC_TEST_AND_SET:
      return expand_builtin_atomic_test_and_set (exp, target);

    case BUILT_IN_ATOMIC_CLEAR:
      return expand_builtin_atomic_clear (exp);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return expand_builtin_atomic_always_lock_free (exp);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      target = expand_builtin_atomic_is_lock_free (exp);
      if (target)
	return target;
      break;

    case BUILT_IN_ATOMIC_THREAD_FENCE:
      expand_builtin_atomic_thread_fence (exp);
      return const0_rtx;

    case BUILT_IN_ATOMIC_SIGNAL_FENCE:
      expand_builtin_atomic_signal_fence (exp);
      return const0_rtx;

    case BUILT_IN_OBJECT_SIZE:
      return expand_builtin_object_size (exp);

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      target = expand_builtin_memory_chk (exp, target, mode, fcode);
      if (target)
	return target;
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maybe_emit_chk_warning (exp, fcode);
      break;

    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      maybe_emit_sprintf_chk_warning (exp, fcode);
      break;
    case BUILT_IN_FREE:
      if (warn_free_nonheap_object)
	maybe_emit_free_warning (exp);
      break;

    case BUILT_IN_THREAD_POINTER:
      return expand_builtin_thread_pointer (exp, target);

    case BUILT_IN_SET_THREAD_POINTER:
      expand_builtin_set_thread_pointer (exp);
      return const0_rtx;
    case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
    case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
    case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
    case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
    case BUILT_IN_CHKP_SET_PTR_BOUNDS:
    case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
    case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
    case BUILT_IN_CHKP_GET_PTR_LBOUND:
    case BUILT_IN_CHKP_GET_PTR_UBOUND:
      /* We allow user CHKP builtins if Pointer Bounds
	 Checker is off.  */
      if (!chkp_function_instrumented_p (current_function_decl))
	{
	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
	    return expand_normal (CALL_EXPR_ARG (exp, 0));
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
	    return expand_normal (size_zero_node);
	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
	    return expand_normal (size_int (-1));
	  else
	    return const0_rtx;
	}
      /* FALLTHROUGH */

    case BUILT_IN_CHKP_BNDMK:
    case BUILT_IN_CHKP_BNDSTX:
    case BUILT_IN_CHKP_BNDCL:
    case BUILT_IN_CHKP_BNDCU:
    case BUILT_IN_CHKP_BNDLDX:
    case BUILT_IN_CHKP_BNDRET:
    case BUILT_IN_CHKP_INTERSECT:
    case BUILT_IN_CHKP_NARROW:
    case BUILT_IN_CHKP_EXTRACT_LOWER:
    case BUILT_IN_CHKP_EXTRACT_UPPER:
      /* Software implementation of Pointer Bounds Checker is NYI.
	 Target support is required.  */
      error ("Your target platform does not support -fcheck-pointer-bounds");
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      /* Do library call, if we failed to expand the builtin when
	 changing code generation mode.  */
      break;

    case BUILT_IN_GOACC_PARLEVEL_ID:
    case BUILT_IN_GOACC_PARLEVEL_SIZE:
      return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY_CHKP:
    case BUILT_IN_MEMMOVE_CHKP:
    case BUILT_IN_MEMPCPY_CHKP:
      if (call_expr_nargs (exp) > 3)
	{
	  /* memcpy_chkp (void *dst, size_t dstbnd,
			  const void *src, size_t srcbnd, size_t n)
	     and others take a pointer bound argument just after each
	     pointer argument.  */
	  tree dest = CALL_EXPR_ARG (exp, 0);
	  tree src = CALL_EXPR_ARG (exp, 2);
	  tree len = CALL_EXPR_ARG (exp, 4);

	  check_memop_access (exp, dest, src, len);
	}
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
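/* For example, a CALL_EXPR to the standard sqrt decl whose single argument
   is a scalar float yields BUILT_IN_SQRT here, while a call whose argument
   types do not match the builtin's prototype yields END_BUILTINS.  */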
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
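/* For example, __builtin_constant_p (3) and __builtin_constant_p ("abc")
   fold to 1 here; an argument with side effects, or of pointer or aggregate
   type that is not such a literal, folds to 0, and anything else is left
   alone so later passes can still decide.  */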
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

static tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
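/* For example, __builtin_expect (a && b, 1) is distributed here so that each
   operand of the TRUTH_ANDIF_EXPR gets its own __builtin_expect (..., 1)
   predicate, keeping the hint alive across short-circuit expansion.  */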
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
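/* On a target whose execution character set maps '0' to 48 (ASCII), the
   isdigit fold above amounts to (unsigned) c - 48 <= 9, a single unsigned
   comparison with no library call.  */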
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
/* Fold a call to builtin frexp, we can assume the base is 2.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
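/* For instance, a constant argument of 8.0 folds to (*arg1 = 4, 0.5),
   since 8.0 is 0.5 * 2**4 and GCC's normalized significands already lie
   in [0.5, 1.0).  */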
/* Fold a call to builtin modf.  */

static tree
fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      REAL_VALUE_TYPE trunc, frac;

      switch (value->cl)
	{
	case rvc_nan:
	case rvc_zero:
	  /* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
	  trunc = frac = *value;
	  break;
	case rvc_inf:
	  /* For +-Inf, return (*arg1 = arg0, +-0).  */
	  frac = dconst0;
	  frac.sign = value->sign;
	  trunc = *value;
	  break;
	default:
	  /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
	  real_trunc (&trunc, VOIDmode, value);
	  real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
	  /* If the original number was negative and already
	     integral, then the fractional part is -0.0.  */
	  if (value->sign && frac.cl == rvc_zero)
	    frac.sign = value->sign;
	  break;
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
			      build_real (rettype, trunc));
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
			      build_real (rettype, frac));
    }

  return NULL_TREE;
}
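/* For instance, a constant argument of -2.5 folds to (*arg1 = -2.0, -0.5),
   and -2.0 folds to (*arg1 = -2.0, -0.0): an already-integral negative
   value gets a fractional part of -0.0.  */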
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
       isnan(x) ? FP_NAN :
	 (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
			    enum tree_code unordered_code,
			    enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
			  fold_build2_loc (loc, code, type, arg0, arg1));
}
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the type-generic
     built-in, or ERROR_MARK for the type-specific ones.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
      opcode = PLUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
      opcode = MINUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
      opcode = MULT_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
					    2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
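/* For example, __builtin_add_overflow (a, b, &r) becomes roughly
     c = .ADD_OVERFLOW (a, b); *r = REALPART_EXPR <c>; IMAGPART_EXPR <c>
   converted to bool, where c is a complex integer carrying both the wrapped
   result and the overflow flag; __builtin_add_overflow_p keeps only the
   flag.  */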
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    {
      /* The documentation says this builtin is equivalent to the preprocessor
	 __FILE__ macro so it appears appropriate to use the same file prefix
	 mappings.  */
      fname = remap_macro_filename (fname);
      return build_string_literal (strlen (fname) + 1, fname);
    }

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (strlen (name) + 1, name);
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  IGNORE is true if the result of the
   function call is ignored.  This function returns NULL_TREE if no
   simplification was possible.  */

static tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
			  int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
	buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
	buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
9390 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
9392 tree ret
= NULL_TREE
;
9393 tree fndecl
= get_callee_fndecl (exp
);
9395 && TREE_CODE (fndecl
) == FUNCTION_DECL
9396 && DECL_BUILT_IN (fndecl
)
9397 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9398 yet. Defer folding until we see all the arguments
9399 (after inlining). */
9400 && !CALL_EXPR_VA_ARG_PACK (exp
))
9402 int nargs
= call_expr_nargs (exp
);
9404 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9405 instead last argument is __builtin_va_arg_pack (). Defer folding
9406 even in that case, until arguments are finalized. */
9407 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
9409 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
9411 && TREE_CODE (fndecl2
) == FUNCTION_DECL
9412 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
9413 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
9417 if (avoid_folding_inline_builtin (fndecl
))
9420 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9421 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
9422 CALL_EXPR_ARGP (exp
), ignore
);
9425 tree
*args
= CALL_EXPR_ARGP (exp
);
9426 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */

 end: ;
  va_end (ap);

  return res;
}
9554 /* We need gotos here since we can only have one VA_CLOSE in a
9562 /* Default target-specific builtin expander that does nothing. */
9565 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
9566 rtx target ATTRIBUTE_UNUSED
,
9567 rtx subtarget ATTRIBUTE_UNUSED
,
9568 machine_mode mode ATTRIBUTE_UNUSED
,
9569 int ignore ATTRIBUTE_UNUSED
)
9574 /* Returns true is EXP represents data that would potentially reside
9575 in a readonly section. */
9578 readonly_data_expr (tree exp
)
9582 if (TREE_CODE (exp
) != ADDR_EXPR
)
9585 exp
= get_base_address (TREE_OPERAND (exp
, 0));
9589 /* Make sure we call decl_readonly_section only for trees it
9590 can handle (since it returns true for everything it doesn't
9592 if (TREE_CODE (exp
) == STRING_CST
9593 || TREE_CODE (exp
) == CONSTRUCTOR
9594 || (VAR_P (exp
) && TREE_STATIC (exp
)))
9595 return decl_readonly_section (exp
, 0);
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
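/* For example, strpbrk (s, "") folds to a null pointer with s still
   evaluated for its side effects, and strpbrk (s, "x") becomes
   strchr (s, 'x'); longer search sets are left to the library.  */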
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}
9704 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9707 Return NULL_TREE if no simplification was possible, otherwise return the
9708 simplified form of the call as a tree.
9710 The simplified form may be a constant or other expression which
9711 computes the same value, but in a more efficient manner (including
9712 calls to other builtin functions).
9714 The call may contain arguments which need to be evaluated, but
9715 which are not useful to determine the result of the call. In
9716 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9717 COMPOUND_EXPR will be an argument which must be evaluated.
9718 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9719 COMPOUND_EXPR in the chain will contain the tree for the simplified
9720 form of the builtin function call. */
static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  /* If the first argument is "", the result is zero.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
         side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
                                   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }

  return NULL_TREE;
}
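
/* Illustrative sketch only: the two special cases handled above are

     strcspn ("", s2)   =>  (size_t) 0    (s2 still evaluated)
     strcspn (s1, "")   =>  strlen (s1)

   assuming the strlen built-in declaration is available; every other
   form still calls the library strcspn.  */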
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else if (nargs == 0)
    {
      /* Evidently an out of date version of <stdarg.h>; can't validate
         va_start's second argument, but can still work as intended.  */
      warning_at (current_location,
                  OPT_Wvarargs,
                  "%<__builtin_next_arg%> called without an argument");
      return true;
    }
  else if (nargs > 1)
    {
      error ("wrong number of arguments to function %<__builtin_next_arg%>");
      return true;
    }
  else
    arg = CALL_EXPR_ARG (exp, 0);

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);

      if (arg != last_parm)
        {
          /* FIXME: Sometimes with the tree optimizers we can end up with
             something that is not the last named argument even though the
             user used the last argument.  We just warn and treat the arg
             as if it were the last argument so that we do not generate
             wrong code because of it.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }
      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
        warning_at (current_location,
                    OPT_Wvarargs,
                    "undefined behavior when second parameter of "
                    "%<va_start%> is declared with %<register%> storage");

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); } */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
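
/* Illustrative sketch only: the check above is what accepts

     void ok (int a, int b, ...)
     { va_list ap; va_start (ap, b); va_end (ap); }

   and warns (under -Wvarargs) for

     void bad (int a, int b, ...)
     { va_list ap; va_start (ap, a); va_end (ap); }

   because 'a' is not the last named parameter.  */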
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
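
/* Rough usage sketch (illustrative only): by the time this expander
   runs, any call whose size could be computed has already been folded,
   so what is expanded here is the "unknown" answer, e.g.

     __builtin_object_size (p, 0)   =>  (size_t) -1   when p is unknown
     __builtin_object_size (p, 2)   =>  (size_t)  0   when p is unknown

   which is the fallback the _FORTIFY_SOURCE wrappers rely on.  */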
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
                                /*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
         an overflow has been detected or when the call couldn't be
         validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
        return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (!fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          tree expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}
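
/* Illustrative sketch only: with a constant length that is known to fit,
   the expansion above turns

     __builtin___memcpy_chk (d, s, 16, __builtin_object_size (d, 0))

   into a plain memcpy (d, s, 16); and when source and destination are
   the same pointer, __mempcpy_chk (d, d, n, os) collapses to d + n while
   the other variants reduce to just evaluating their arguments.  */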
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
         by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
         of the string to which the source string is being appended so
         just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}
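
/* Illustrative only: this is the compile-time side of _FORTIFY_SOURCE,
   e.g.

     char d[4];
     __builtin___strcpy_chk (d, "too long", __builtin_object_size (d, 1));

   is diagnosed here instead of being left to the run-time __chk_fail.  */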
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and the first ... argument is a string literal,
     we know its size too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
                /*maxread=*/NULL_TREE, len, size);
}
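
/* Illustrative only:

     char d[4];
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 1),
                              "%s", "overflow");

   is caught here because the "%s" argument is a literal whose length
   (plus the terminating nul) is known to exceed the destination.  */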
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}
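
/* Illustrative only: this is the check behind -Wfree-nonheap-object,
   e.g.

     int x;
     free (&x);    // warning: attempt to free a non-heap object 'x'
 */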
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
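
/* Illustrative only: because the pointer argument is never evaluated,

     __builtin_object_size (p = q, 0)   folds to (size_t) -1
     __builtin_object_size (p = q, 2)   folds to (size_t) 0

   without performing the assignment, as described in the comment in the
   function above.  */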
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */
static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }

  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */
static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, then bypass the checks.  */
static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}
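
/* Illustrative only: with both arguments constant, e.g.

     int q;
     double r = __builtin_remquo (5.0, 3.0, &q);

   the call can fold to the compound expression "q = 2, -1.0", the
   remainder and quotient bits having been computed by MPFR at compile
   time.  */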
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1, 1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }
  return result;
}
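
/* Illustrative only: for a constant argument, e.g.

     int sign;
     double v = __builtin_lgamma_r (0.5, &sign);

   the call can fold to "sign = 1, 0.57236494...", both the value of
   log(Gamma(0.5)) and its sign being computed by MPFR at compile
   time.  */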
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */
static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }
  return result;
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */
tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}