/* Expand builtin functions.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "tree-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
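
/* Illustrative note (added commentary, not original code): given the prefix
   checks above, is_builtin_name ("__builtin_memcpy"),
   is_builtin_name ("__sync_fetch_and_add") and
   is_builtin_name ("__atomic_load_n") all return true, while
   is_builtin_name ("memcpy") returns false, so a plain memcpy call is not
   treated as "called as built-in" by called_as_built_in.  */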
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned int ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));

      /* Else adjust bitpos accordingly.  */
      bitpos += ptr_bitpos;
      if (TREE_CODE (exp) == MEM_REF
	  || TREE_CODE (exp) == TARGET_MEM_REF)
	bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
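
/* Illustrative example (added commentary, not original code): for a pointer
   known to satisfy ptr == 16*k + 4 in bytes, get_pointer_alignment_1 stores
   16*BITS_PER_UNIT in *ALIGNP and 4*BITS_PER_UNIT in *BITPOSP; since the
   bit-offset is nonzero, get_pointer_alignment then reports
   least_bit_hwi (bitpos), i.e. 4*BITS_PER_UNIT, as the guaranteed
   alignment.  */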
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
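
/* Illustrative examples (added commentary, not original code): for a
   STRING_CST "hello" with no offset the function returns ssize_int (5);
   for "foo\0bar" with a non-constant offset it returns NULL_TREE, because
   the embedded zero byte makes the length depend on the unknown offset;
   for "abcd" with a known constant offset of 2 it returns ssize_int (2).  */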
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
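
/* Illustrative example (added commentary, not original code): on a
   little-endian target with 8-bit units, c_readstr ("abcd", SImode) packs
   the bytes 'a' 'b' 'c' 'd' into increasing bit positions, yielding the
   constant 0x64636261; on a big-endian target the same call yields
   0x61626364.  */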
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to
   by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
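
/* Illustrative example (added commentary, not original code): for an
   INTEGER_CST holding 0x78 ('x'), *P is set to 'x' and 0 is returned;
   when the constant is not an INTEGER_CST or its value does not survive
   truncation to the target/host char width (val != hostval), 1 is
   returned and *P is left untouched.  */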
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
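
/* Illustrative note (added commentary, not original code): a source-level
   call such as __builtin_return_address (0) reaches this function with
   COUNT == 0 and FNDECL_CODE == BUILT_IN_RETURN_ADDRESS, so no frames are
   walked and the return address is loaded relative to the current frame;
   __builtin_frame_address (2) instead walks the dynamic chain twice and
   returns the (possibly FRAME_ADDR_RTX-adjusted) frame address itself.  */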
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
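
/* Illustrative note (added commentary, not original code): the buffer
   layout written above is word 0 = frame pointer value, word 1 =
   RECEIVER_LABEL, and words starting at offset 2*GET_MODE_SIZE (Pmode) =
   machine-dependent stack save area; this is the same layout that
   expand_builtin_longjmp and expand_builtin_update_setjmp_buf read back
   below.  */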
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
	= ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);
  return res;
}
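
/* Illustrative examples (added commentary, not original code):
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly two pointer arguments (as used for
   __builtin_nonlocal_goto below), while
     validate_arglist (exp, POINTER_TYPE, 0)
   accepts one pointer followed by anything (as used for
   __builtin_prefetch).  */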
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
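
/* Illustrative example (added commentary, not original code): for
   __builtin_prefetch (&a[i], 0, 3) the address &a[i] becomes OP0, the
   read/write flag 0 becomes OP1 and the locality 3 becomes OP2; a call
   like __builtin_prefetch (p, x) with a non-constant x is diagnosed by
   the error above and expanded as if x were 0.  */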
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))) != NULL_TREE)
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
1273 #define apply_args_mode \
1274 (this_target_builtins->x_apply_args_mode)
1275 #define apply_result_mode \
1276 (this_target_builtins->x_apply_result_mode)
1278 /* Return the size required for the block returned by __builtin_apply_args,
1279 and initialize apply_args_mode. */
1282 apply_args_size (void)
1284 static int size
= -1;
1289 /* The values computed by this function never change. */
1292 /* The first value is the incoming arg-pointer. */
1293 size
= GET_MODE_SIZE (Pmode
);
1295 /* The second value is the structure value address unless this is
1296 passed as an "invisible" first argument. */
1297 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1298 size
+= GET_MODE_SIZE (Pmode
);
1300 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1301 if (FUNCTION_ARG_REGNO_P (regno
))
1303 mode
= targetm
.calls
.get_raw_arg_mode (regno
);
1305 gcc_assert (mode
!= VOIDmode
);
1307 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1308 if (size
% align
!= 0)
1309 size
= CEIL (size
, align
) * align
;
1310 size
+= GET_MODE_SIZE (mode
);
1311 apply_args_mode
[regno
] = mode
;
1315 apply_args_mode
[regno
] = VOIDmode
;
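
/* Illustrative note (added commentary, not original code): the rounding
   above is plain alignment rounding; e.g. with size == 6 and an 8-byte
   aligned 8-byte register mode, size is first rounded up to
   CEIL (6, 8) * 8 == 8 and then advanced to 16, so each saved register
   starts at a properly aligned offset inside the block.  */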
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
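
/* Illustrative note (added commentary, not original code): together with
   expand_builtin_apply_args above and expand_builtin_return below, this
   implements the GNU C untyped-call triple
     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply (fn, args, size);
     __builtin_return (res);
   which forwards the current function's incoming argument registers to FN
   and then returns whatever FN returned.  */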
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
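
/* Illustrative example (added commentary, not original code): for
   __builtin_classify_type (1.0) the argument type is REAL_TYPE, so the
   expansion is GEN_INT (real_type_class); a call with no arguments
   expands to no_type_class.  */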
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F; \
  fcodel = BUILT_IN_##MATHFN##L; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R; \
  fcodel = BUILT_IN_##MATHFN##L_R; break;
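
/* Illustrative expansion (added commentary, not original code):
   CASE_MATHFN (COPYSIGN) expands to
     CASE_CFN_COPYSIGN:
       fcode = BUILT_IN_COPYSIGN; fcodef = BUILT_IN_COPYSIGNF;
       fcodel = BUILT_IN_COPYSIGNL; break;
   so each case records the double/float/long double variants of one
   function for the type dispatch at the end of mathfn_built_in_2.  */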
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  built_in_function fcode, fcodef, fcodel;

  switch (fn)
    {
    CASE_MATHFN (COPYSIGN)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (TGAMMA)

    default:
      return END_BUILTINS;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return fcodel;
  else
    return END_BUILTINS;
}
/* Return a mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return NULL_TREE.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
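
/* Illustrative usage note (added commentary, not part of the original
   sources): given a type and a double-precision function code, these
   helpers map to the matching variant for that type, e.g.

       tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   yields the declaration of sinf when an implicit declaration of it is
   available, and NULL_TREE otherwise.  */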
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internal-fn.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}

/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
	{
	  tree_pair types = direct_internal_fn_types (ifn, call);
	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
	    return ifn;
	}
    }
  return IFN_LAST;
}
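
/* Illustrative note (added commentary, not part of the original sources):
   for a GIMPLE call such as

       x = __builtin_sqrtf (y);

   associated_internal_fn maps BUILT_IN_SQRTF to IFN_SQRT, and
   replacement_internal_fn returns IFN_SQRT only when that internal
   function is directly supported for the call's mode under the basic
   block's optimization type; otherwise it returns IFN_LAST and the
   library call is kept.  */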
/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  machine_mode mode;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  machine_mode mode;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Given an interclass math builtin decl FNDECL and its argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  machine_mode mode;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by the pointers.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
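
/* Illustrative note (added commentary, not part of the original sources):
   __builtin_cexpi (x) computes cos (x) + i*sin (x), so the three expansion
   strategies above agree in exact arithmetic:

       cexpi (x) == cexp (COMPLEX_EXPR (0.0, x))

   and when only sincos is available, the real and imaginary parts of the
   result are simply the two sincos outputs read back from the
   temporaries.  */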
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  machine_mode mode;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
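
/* Illustrative note (added commentary, not part of the original sources):
   on a target without an lfloor pattern, a call such as

       long l = __builtin_lfloor (x);   // x has type double

   is expanded along the fallback path above roughly as

       double tmp = floor (x);
       long l = (long) tmp;             // via expand_fix

   which is why no errno handling is needed: lfloor/lceil are GCC
   extensions documented not to set errno.  */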
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* An anti-range 0...N lets us determine the minimal size as N+1.  */
	  if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
	    *min_size = max.to_uhwi () + 1;

	  /* Bounds tests may produce an anti-range allowing negative values
	     of N.  We can still use the information and make a guess that
	     N is not negative.  */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
			  GET_MODE_MASK (GET_MODE (len_rtx)));
}
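
/* Illustrative note (added commentary, not part of the original sources):
   if LEN is an SSA name whose value-range information says 16 <= LEN <= 64,
   the caller gets *MIN_SIZE == 16 and *MAX_SIZE == *PROBABLE_MAX_SIZE == 64,
   which lets the block-operation expanders pick a fixed unrolled sequence
   instead of a generic library call.  If only an anti-range such as ~[0, 0]
   is known, the minimum is raised to 1 and the maximum stays at the mode
   mask.  */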
/* Helper function to do the actual work for expand_builtin_memcpy.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				     CALL_EXPR_TAILCALL (exp)
				     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memcpy_args (dest, src, len, target, exp);
    }
}
/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy_with_bounds (tree exp, rtx target)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_mempcpy_args (dest, src, len,
					  target, mode, /*endp=*/ 1,
					  exp);
    }
}
/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
					     mode, /*endp=*/ 1, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, machine_mode mode, int endp,
			     tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
	   && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree exp, rtx target)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      return expand_builtin_strcpy_args (dest, src, target);
    }
  return NULL_RTX;
}

/* Helper function to do the actual work for expand_builtin_strcpy.  The
   arguments to the builtin_strcpy call DEST and SRC are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_strcpy.  */

static rtx
expand_builtin_strcpy_args (tree dest, tree src, rtx target)
{
  return expand_movstr (dest, src, target, /*endp=*/0);
}
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2,
					 exp);
      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
			  machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes of the single constant char value DATA, repeated to fill the
   mode, and return it as a target constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			 machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}

/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
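
/* Illustrative note (added commentary, not part of the original sources):
   for a 4-byte integer mode, builtin_memset_gen_str multiplies the
   (zero-extended) fill byte by the constant 0x01010101 built from the
   all-ones buffer above, e.g. a fill value of 0xAB becomes the register
   constant 0xABABABAB, which store_by_pieces can then store word by
   word.  */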
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}

/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 3);
      rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
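
/* Illustrative note (added commentary, not part of the original sources):
   with the argument rewriting above,

       bzero (p, n);

   is expanded exactly like

       memset (p, 0, (size_t) n);

   except that the original bzero CALL_EXPR is kept as ORIG_EXP, so a
   failed inline expansion still emits a call to bzero rather than to
   memset.  */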
/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				     TREE_TYPE (len), target,
				     result_eq, constfn,
				     CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed; the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      rtx result = NULL_RTX;

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

      /* Try to call cmpstrsi.  */
      if (cmpstr_icode != CODE_FOR_nothing)
	result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
				MIN (arg1_align, arg2_align));

      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!result && cmpstrn_icode != CODE_FOR_nothing)
	{
	  tree len;
	  rtx arg3_rtx;

	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (len && !TREE_SIDE_EFFECTS (len))
	    {
	      arg3_rtx = expand_normal (len);
	      result = expand_cmpstrn_or_cmpmem
		(cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
		 arg3_rtx, MIN (arg1_align, arg2_align));
	    }
	}

      if (result)
	{
	  /* Return the value in the proper mode for this function.  */
	  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
                        ATTRIBUTE_UNUSED machine_mode mode)

  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
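/* Illustrative sketch (not part of this file): for a call such as
   strncmp (s, "abc", 10), c_strlen determines strlen ("abc") == 3 for the
   second argument, so the length handed to cmpstrnsi would be
   MIN (3 + 1, 10) == 4, provided the computed length has no side
   effects.  */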
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode != CODE_FOR_nothing)

      tree len, len1, len2;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;

      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant
         lengths, use the smaller.  */
      else if (TREE_SIDE_EFFECTS (len1))
      else if (TREE_SIDE_EFFECTS (len2))
      else if (TREE_CODE (len1) != INTEGER_CST)
      else if (TREE_CODE (len2) != INTEGER_CST)
      else if (tree_int_cst_lt (len1, len2))

      /* If both arguments have side effects, we cannot optimize.  */
      if (!len || TREE_SIDE_EFFECTS (len))

      /* The actual new length parameter is MIN(len,arg3).  */
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
                             fold_convert_loc (loc, TREE_TYPE (len), arg3));

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)

      /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);
      len = builtin_save_expr (len);

      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
                                         arg2_rtx, TREE_TYPE (len), arg3_rtx,
                                         MIN (arg1_align, arg2_align));

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
      return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  fndecl = get_callee_fndecl (exp);
  fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

expand_builtin_saveregs (void)

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();
/* Expand a call to __builtin_next_arg.  */

expand_builtin_next_arg (void)

  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
                       crtl->args.internal_arg_pointer,
                       crtl->args.arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)

  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
         vatype, but it's possible we've actually been given an array
         (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)

          tree p1 = build_pointer_type (TREE_TYPE (vatype));
          valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);

      tree pt = build_pointer_type (vatype);

      if (! TREE_SIDE_EFFECTS (valist))

          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;

      if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
                                vatype, valist, build_int_cst (pt, 0));
/* The "standard" definition of va_list is void*.  */

std_build_builtin_va_list (void)

  return ptr_type_node;

/* The "standard" abi va_list is va_list_type_node.  */

std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)

  return va_list_type_node;

/* The "standard" type of va_list is va_list_type_node.  */

std_canonical_va_list_type (tree type)

  wtype = va_list_type_node;

  if (TREE_CODE (wtype) == ARRAY_TYPE)

      /* If va_list is an array type, the argument may have decayed
         to a pointer type, e.g. by being passed to another function.
         In that case, unwrap both types so that we can compare the
         underlying records.  */
      if (TREE_CODE (htype) == ARRAY_TYPE
          || POINTER_TYPE_P (htype))

          wtype = TREE_TYPE (wtype);
          htype = TREE_TYPE (htype);

  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

std_expand_builtin_va_start (tree valist, rtx nextarg)

  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
                                      make_tree (TREE_TYPE (valist),
/* Expand EXP, a call to __builtin_va_start.  */

expand_builtin_va_start (tree exp)

  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)

      error_at (loc, "too few arguments to function %<va_start%>");

  if (fold_builtin_next_arg (exp, true))

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
    std_expand_builtin_va_start (valist, nextarg);
/* Expand EXP, a call to __builtin_va_end.  */

expand_builtin_va_end (tree exp)

  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

expand_builtin_va_copy (tree exp)

  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)

      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
                          NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

expand_builtin_frame_address (tree fndecl, tree exp)

  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))

      error ("invalid argument to %qD", fndecl);

      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
          warning (0, "unsupported argument to %qD", fndecl);

          /* Warn since no effort is made to ensure that any frame
             beyond the current one exists or can be safely reached.  */
          warning (OPT_Wframe_address, "calling %qD with "
                   "a nonzero argument is unsafe", fndecl);

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)

          && ! CONSTANT_P (tem))
        tem = copy_addr_to_reg (tem);
/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
   is the same as for allocate_dynamic_stack_space.  */

expand_builtin_alloca (tree exp, bool cannot_accumulate)

  bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
                            == BUILT_IN_ALLOCA_WITH_ALIGN);

    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
           ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
           : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  */
  result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
  result = convert_memory_address (ptr_mode, result);
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */
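/* Illustrative sketch (not part of this file): the bswap builtins reverse
   the byte order of their integer argument, e.g. for a 32-bit value
     __builtin_bswap32 (0x12345678) == 0x78563412
   which is the operation the bswap_optab expansion below performs.  */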
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
                     subtarget && GET_MODE (subtarget) == target_mode
                     ? subtarget : NULL_RTX,
                     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
                     rtx subtarget, optab op_optab)

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
                      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
                          == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
                     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
                        op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantic should've been already executed by
   tree branch prediction pass.  */

expand_builtin_expect (tree exp, rtx target)

  if (call_expr_nargs (exp) < 2)

  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
              || optimize == 0 || seen_error ());
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed.  */

expand_builtin_assume_aligned (tree exp, rtx target)

  if (call_expr_nargs (exp) < 2)

  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
              && (call_expr_nargs (exp) < 3
                  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
expand_builtin_trap (void)

  if (targetm.have_trap ())

      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
         REG_ARGS_SIZE note to prevent crossjumping of calls with
         different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
        add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

expand_builtin_unreachable (void)
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

expand_builtin_fabs (tree exp, rtx target, rtx subtarget)

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

expand_builtin_copysign (tree exp, rtx target, rtx subtarget)

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
/* Expand a call to __builtin___clear_cache.  */

expand_builtin___clear_cache (tree exp)

  if (!targetm.code_for_clear_cache)

#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does something.  Just do the default expansion to a call to
         __clear_cache().  */
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
         does nothing.  There is no need to call it.  Do nothing.  */
#endif /* CLEAR_INSN_CACHE */

  /* We have a "clear_cache" insn, and it will handle everything.  */

  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))

      error ("both arguments to %<__builtin___clear_cache%> must be pointers");

  if (targetm.have_clear_cache ())

      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
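/* Illustrative sketch (not part of this file): the rounding below is the
   usual add-then-mask idiom.  With a TRAMPOLINE_ALIGNMENT of 128 bits
   (16 bytes), an address of 0x1009 becomes
     (0x1009 + 15) & -16 == 0x1010.  */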
round_trampoline_addr (rtx tramp)

  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);
expand_builtin_init_trampoline (tree exp, bool onstack)

  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
                         POINTER_TYPE, VOID_TYPE))

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);

      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
        warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
                    "trampoline generated for nested function %qD", t_func);
expand_builtin_adjust_trampoline (tree exp)

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

expand_builtin_init_descriptor (tree exp)

  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
                                     POINTER_SIZE / BITS_PER_UNIT), r_func);
/* Expand a call to the builtin descriptor adjustment routine.  */

expand_builtin_adjust_descriptor (tree exp)

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
  tramp = plus_constant (ptr_mode, tramp,
                         targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */

expand_builtin_signbit (tree exp, rtx target)

  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  enum insn_code icode;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)

      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
      delete_insns_since (last);

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;

      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
                             build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)

      imode = int_mode_for_mode (fmode);
      gcc_assert (imode != BLKmode);
      temp = gen_lowpart (imode, temp);

      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
        word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
        word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))

      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
        temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
                           immed_wide_int_const (mask, rmode),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);

      /* Perform a logical right shift to place the signbit in the least
         significant bit, then truncate the result to the desired mode
         and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */

expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))

      id = get_identifier ("__gcov_fork");

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
                     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */
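/* Illustrative sketch (not part of this file): for the __sync_*_4 variants
   FCODE_DIFF is 2, so BITS_PER_UNIT << 2 == 32 bits on a typical
   8-bit-byte target, and mode_for_size then yields the 32-bit integer
   mode (SImode on most targets).  */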
static inline machine_mode
get_builtin_sync_mode (int fcode_diff)

  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

get_builtin_sync_mem (tree loc, machine_mode mode)

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
                           get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

expand_expr_force_mode (tree exp, machine_mode mode)

  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */
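/* Illustrative sketch (not part of this file): at the source level
     old = __sync_fetch_and_add (&x, 5);    returns the value before the add
     val = __sync_add_and_fetch (&x, 5);    returns the value after the add
   and both perform the same atomic read-modify-write.  The "nand"
   variants, which reach this expander with CODE == NOT, compute
   ~(*ptr & val) under the GCC 4.4 and later semantics warned about
   below.  */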
expand_builtin_sync_operation (machine_mode mode, tree exp,
                               enum rtx_code code, bool after,

  location_t loc = EXPR_LOCATION (exp);

  if (code == NOT && warn_sync_nand)

      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

        case BUILT_IN_SYNC_FETCH_AND_NAND_1:
        case BUILT_IN_SYNC_FETCH_AND_NAND_2:
        case BUILT_IN_SYNC_FETCH_AND_NAND_4:
        case BUILT_IN_SYNC_FETCH_AND_NAND_8:
        case BUILT_IN_SYNC_FETCH_AND_NAND_16:

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_f_a_n = true;

        case BUILT_IN_SYNC_NAND_AND_FETCH_1:
        case BUILT_IN_SYNC_NAND_AND_FETCH_2:
        case BUILT_IN_SYNC_NAND_AND_FETCH_4:
        case BUILT_IN_SYNC_NAND_AND_FETCH_8:
        case BUILT_IN_SYNC_NAND_AND_FETCH_16:

          fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
          inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
          warned_n_a_f = true;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

expand_builtin_compare_and_swap (machine_mode mode, tree exp,
                                 bool is_bool, rtx target)

  rtx old_val, new_val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  pbool = poval = NULL;
  if (target != const0_rtx)

  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
                                       false, MEMMODEL_SYNC_SEQ_CST,
                                       MEMMODEL_SYNC_SEQ_CST))
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

expand_builtin_sync_lock_release (machine_mode mode, tree exp)

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  */
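/* Illustrative sketch (not part of this file): a user-level call such as
   __atomic_load_n (&x, __ATOMIC_SEQ_CST) passes the memory model as an
   integer constant; a non-constant model argument is conservatively
   treated as sequentially consistent by the function below.  */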
static enum memmodel
get_memmodel (tree exp)

  unsigned HOST_WIDE_INT val;
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)

      warning_at (loc, OPT_Winvalid_memory_model,
                  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)

      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
/* Expand the __atomic_exchange intrinsic:
        TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */
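/* Illustrative sketch (not part of this file): the source-level form handled
   here is e.g.
     old = __atomic_exchange_n (&x, 1, __ATOMIC_ACQ_REL);
   which atomically stores the new value and returns the previous contents
   of the object.  */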
expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)

  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  if (!flag_inline_atomics)

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_exchange (target, mem, val, model);
/* Expand the __atomic_compare_exchange intrinsic:
        bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
                                        TYPE desired, BOOL weak,
                                        enum memmodel success,
                                        enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */
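/* Illustrative sketch (not part of this file): on failure the builtin writes
   the observed value back through EXPECT, so the usual user-level CAS loop is

     TYPE expected = *object;
     while (!__atomic_compare_exchange_n (object, &expected, desired,
                                          false, __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST))
       ;    (expected now holds the value seen by the failed attempt)

   which is why the expander below conditionally stores OLDVAL back to
   EXPECT.  */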
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,

  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)

      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;

  if (is_mm_release (failure) || is_mm_acq_rel (failure))

      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;

  if (!flag_inline_atomics)

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);

  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)

  if (target == const0_rtx)

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
                                       is_weak, success, failure))

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
                           GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   location.  */

expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)

  vec<tree, va_gc> *vec;

  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
                                      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);

  emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
                           build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
                           + exact_log2 (GET_MODE_SIZE (mode)));
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);

      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

expand_ifn_atomic_compare_exchange (gcall *call)

  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)

      warning_at (loc, OPT_Winvalid_memory_model,
                  "failure memory model cannot be stronger than success "
                  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;

  if (is_mm_release (failure) || is_mm_acq_rel (failure))

      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid failure memory model for "
                  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;

  if (!flag_inline_atomics)

      expand_ifn_atomic_compare_exchange_into_call (call, mode);

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
                                       is_weak, success, failure))

      expand_ifn_atomic_compare_exchange_into_call (call, mode);

  lhs = gimple_call_lhs (call);

      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
        boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
/* Expand the __atomic_load intrinsic:
        TYPE __atomic_load (TYPE *object, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)

  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 1));
  if (is_mm_release (model) || is_mm_acq_rel (model))

        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_load%>");
      model = MEMMODEL_SEQ_CST;

  if (!flag_inline_atomics)

  /* Expand the operand.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  return expand_atomic_load (target, mem, model);
/* Expand the __atomic_store intrinsic:
        void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

expand_builtin_atomic_store (machine_mode mode, tree exp)

  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
        || is_mm_release (model)))

        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;

  if (!flag_inline_atomics)

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
/* Expand the __atomic_fetch_XXX intrinsic:
        TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */
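/* Illustrative sketch (not part of this file): when the operation falls back
   to the external library routine, that routine only provides the "fetch
   before" value, so e.g. an __atomic_add_fetch result can be recovered as
   ret = ret + val after the call, and the NAND case needs ret = ~(ret & val);
   that is what the arithmetic correction at the end of this function does.  */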
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
                                enum rtx_code code, bool fetch_after,
                                bool ignore, enum built_in_function ext_call)

  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)

      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)

  /* Change the call to the specified function.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
      ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
      ret = expand_simple_unop (mode, NOT, ret, target, true);
      ret = expand_simple_binop (mode, code, ret, val, target, true,
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

expand_ifn_atomic_bit_test_and (gcall *call)

  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  switch (gimple_call_internal_fn (call))

    case IFN_ATOMIC_BIT_TEST_AND_SET:
      optab = atomic_bit_test_and_set_optab;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      optab = atomic_bit_test_and_complement_optab;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      optab = atomic_bit_test_and_reset_optab;

  if (lhs == NULL_TREE)

      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                                 val, NULL_RTX, true, OPTAB_DIRECT);
        val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);

  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))

  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
                             val, NULL_RTX, true, OPTAB_DIRECT);
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
                                       code, model, false);
  if (integer_onep (flag))

      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
                                    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
                                    true, OPTAB_DIRECT);

    result = expand_simple_binop (mode, AND, result, maskval, target, true,
  if (result != target)
    emit_move_insn (target, result);
/* Expand an atomic clear operation.
        void _atomic_clear (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

expand_builtin_atomic_clear (tree exp)

  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))

        = expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
                  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;

  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
     Failing that, a store is issued by __atomic_store.  The only way this can
     fail is if the bool type is larger than a word size.  Unlikely, but
     handle it anyway for completeness.  Assume a single threaded model since
     there is no atomic support in this case, and no barriers are required.  */
  ret = expand_atomic_store (mem, const0_rtx, model, true);
    emit_move_insn (mem, const0_rtx);
/* Expand an atomic test_and_set operation.
        bool _atomic_test_and_set (BOOL *obj, enum memmodel)
   EXP is the call expression.  */

expand_builtin_atomic_test_and_set (tree exp, rtx target)

  enum memmodel model;

  mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  model = get_memmodel (CALL_EXPR_ARG (exp, 1));

  return expand_atomic_test_and_set (target, mem, model);
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
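/* Illustrative sketch (not part of this file): a front-end query such as
   __atomic_always_lock_free (sizeof (int), 0) folds to true here when a
   32-bit compare-and-swap pattern exists and the assumed alignment of the
   object is at least the mode alignment; passing a real pointer instead of 0
   lets the second argument supply the object's alignment.  */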
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)

  unsigned int mode_align, type_align;

  if (TREE_CODE (arg0) != INTEGER_CST)

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)

      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
        type_align = mode_align;

      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
         end before anything else has a chance to look at it.  The pointer
         parameter at this point is usually cast to a void *, so check for that
         and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
          && POINTER_TYPE_P (ttype)
          && VOID_TYPE_P (TREE_TYPE (ttype))
          && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
        arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  */
  if (can_compare_and_swap_p (mode, true))
    return boolean_true_node;

  return boolean_false_node;
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

expand_builtin_atomic_always_lock_free (tree exp)

  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (arg0) != INTEGER_CST)

      error ("non-constant argument 1 to __atomic_always_lock_free");

  size = fold_builtin_atomic_always_lock_free (arg0, arg1);
  if (size == boolean_true_node)
/* Return a one or zero if it can be determined that object ARG1 of size ARG
   is lock free on this architecture.  */

fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)

  if (!flag_inline_atomics)

  /* If it isn't always lock free, don't generate a result.  */
  if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
    return boolean_true_node;
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   false.  */

expand_builtin_atomic_is_lock_free (tree exp)

  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);

  if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))

      error ("non-integer argument 1 to __atomic_is_lock_free");

  if (!flag_inline_atomics)

  /* If the value is known at compile time, return the RTX for it.  */
  size = fold_builtin_atomic_is_lock_free (arg0, arg1);
  if (size == boolean_true_node)
/* Expand the __atomic_thread_fence intrinsic:
        void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

expand_builtin_atomic_thread_fence (tree exp)

  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);

/* Expand the __atomic_signal_fence intrinsic:
        void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

expand_builtin_atomic_signal_fence (tree exp)

  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
/* Expand the __sync_synchronize intrinsic.  */

expand_builtin_sync_synchronize (void)

  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
expand_builtin_thread_pointer (tree exp, rtx target)

  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))

  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)

      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
          || GET_MODE (target) != Pmode)
        target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);

  error ("__builtin_thread_pointer is not supported on this target");

expand_builtin_set_thread_pointer (tree exp)

  enum insn_code icode;
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))

  icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)

      struct expand_operand op;
      rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
                             Pmode, EXPAND_NORMAL);
      create_input_operand (&op, val, Pmode);
      expand_insn (icode, 1, &op);

  error ("__builtin_set_thread_pointer is not supported on this target");
/* Emit code to restore the current value of stack.  */

expand_stack_restore (tree var)

  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);

/* Emit code to save the current value of stack.  */

expand_stack_save (void)

  emit_stack_save (SAVE_BLOCK, &ret);
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
5922 CASE_FLT_FN (BUILT_IN_FABS
):
5923 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
5924 case BUILT_IN_FABSD32
:
5925 case BUILT_IN_FABSD64
:
5926 case BUILT_IN_FABSD128
:
5927 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5932 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5933 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN
):
5934 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5939 /* Just do a normal library call if we were unable to fold
5941 CASE_FLT_FN (BUILT_IN_CABS
):
5944 CASE_FLT_FN (BUILT_IN_FMA
):
5945 target
= expand_builtin_mathfn_ternary (exp
, target
, subtarget
);
5950 CASE_FLT_FN (BUILT_IN_ILOGB
):
5951 if (! flag_unsafe_math_optimizations
)
5954 CASE_FLT_FN (BUILT_IN_ISINF
):
5955 CASE_FLT_FN (BUILT_IN_FINITE
):
5956 case BUILT_IN_ISFINITE
:
5957 case BUILT_IN_ISNORMAL
:
5958 target
= expand_builtin_interclass_mathfn (exp
, target
);
5963 CASE_FLT_FN (BUILT_IN_ICEIL
):
5964 CASE_FLT_FN (BUILT_IN_LCEIL
):
5965 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5966 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5967 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5968 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5969 target
= expand_builtin_int_roundingfn (exp
, target
);
5974 CASE_FLT_FN (BUILT_IN_IRINT
):
5975 CASE_FLT_FN (BUILT_IN_LRINT
):
5976 CASE_FLT_FN (BUILT_IN_LLRINT
):
5977 CASE_FLT_FN (BUILT_IN_IROUND
):
5978 CASE_FLT_FN (BUILT_IN_LROUND
):
5979 CASE_FLT_FN (BUILT_IN_LLROUND
):
5980 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5985 CASE_FLT_FN (BUILT_IN_POWI
):
5986 target
= expand_builtin_powi (exp
, target
);
5991 CASE_FLT_FN (BUILT_IN_CEXPI
):
5992 target
= expand_builtin_cexpi (exp
, target
);
5993 gcc_assert (target
);
5996 CASE_FLT_FN (BUILT_IN_SIN
):
5997 CASE_FLT_FN (BUILT_IN_COS
):
5998 if (! flag_unsafe_math_optimizations
)
6000 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6005 CASE_FLT_FN (BUILT_IN_SINCOS
):
6006 if (! flag_unsafe_math_optimizations
)
6008 target
= expand_builtin_sincos (exp
);
6013 case BUILT_IN_APPLY_ARGS
:
6014 return expand_builtin_apply_args ();
6016 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6017 FUNCTION with a copy of the parameters described by
6018 ARGUMENTS, and ARGSIZE. It returns a block of memory
6019 allocated on the stack into which is stored all the registers
6020 that might possibly be used for returning the result of a
6021 function. ARGUMENTS is the value returned by
6022 __builtin_apply_args. ARGSIZE is the number of bytes of
6023 arguments that must be copied. ??? How should this value be
6024 computed? We'll also need a safe worst case value for varargs
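  /* Illustrative sketch (not from the GCC sources): the usual pattern for
     these builtins in a forwarding wrapper looks like

	  void *args = __builtin_apply_args ();
	  void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
	  __builtin_return (ret);

     where 'target_fn' is a hypothetical function pointer and 64 is a
     caller-chosen upper bound on the argument bytes to copy; as the ???
     note above says, there is no portable way to compute that bound.  */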
6026 case BUILT_IN_APPLY
:
6027 if (!validate_arglist (exp
, POINTER_TYPE
,
6028 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6029 && !validate_arglist (exp
, REFERENCE_TYPE
,
6030 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6036 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6037 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6038 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6040 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6043 /* __builtin_return (RESULT) causes the function to return the
6044 value described by RESULT. RESULT is address of the block of
6045 memory returned by __builtin_apply. */
6046 case BUILT_IN_RETURN
:
6047 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6048 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6051 case BUILT_IN_SAVEREGS
:
6052 return expand_builtin_saveregs ();
6054 case BUILT_IN_VA_ARG_PACK
:
6055 /* All valid uses of __builtin_va_arg_pack () are removed during
6057 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6060 case BUILT_IN_VA_ARG_PACK_LEN
:
6061 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6063 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6066 /* Return the address of the first anonymous stack arg. */
6067 case BUILT_IN_NEXT_ARG
:
6068 if (fold_builtin_next_arg (exp
, false))
6070 return expand_builtin_next_arg ();
6072 case BUILT_IN_CLEAR_CACHE
:
6073 target
= expand_builtin___clear_cache (exp
);
6078 case BUILT_IN_CLASSIFY_TYPE
:
6079 return expand_builtin_classify_type (exp
);
6081 case BUILT_IN_CONSTANT_P
:
6084 case BUILT_IN_FRAME_ADDRESS
:
6085 case BUILT_IN_RETURN_ADDRESS
:
6086 return expand_builtin_frame_address (fndecl
, exp
);
6088 /* Returns the address of the area where the structure is returned.
6090 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6091 if (call_expr_nargs (exp
) != 0
6092 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6093 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6096 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6098 case BUILT_IN_ALLOCA
:
6099 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6100 /* If the allocation stems from the declaration of a variable-sized
6101 object, it cannot accumulate. */
6102 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6107 case BUILT_IN_STACK_SAVE
:
6108 return expand_stack_save ();
6110 case BUILT_IN_STACK_RESTORE
:
6111 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6114 case BUILT_IN_BSWAP16
:
6115 case BUILT_IN_BSWAP32
:
6116 case BUILT_IN_BSWAP64
:
6117 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6122 CASE_INT_FN (BUILT_IN_FFS
):
6123 target
= expand_builtin_unop (target_mode
, exp
, target
,
6124 subtarget
, ffs_optab
);
6129 CASE_INT_FN (BUILT_IN_CLZ
):
6130 target
= expand_builtin_unop (target_mode
, exp
, target
,
6131 subtarget
, clz_optab
);
6136 CASE_INT_FN (BUILT_IN_CTZ
):
6137 target
= expand_builtin_unop (target_mode
, exp
, target
,
6138 subtarget
, ctz_optab
);
6143 CASE_INT_FN (BUILT_IN_CLRSB
):
6144 target
= expand_builtin_unop (target_mode
, exp
, target
,
6145 subtarget
, clrsb_optab
);
6150 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6151 target
= expand_builtin_unop (target_mode
, exp
, target
,
6152 subtarget
, popcount_optab
);
6157 CASE_INT_FN (BUILT_IN_PARITY
):
6158 target
= expand_builtin_unop (target_mode
, exp
, target
,
6159 subtarget
, parity_optab
);
6164 case BUILT_IN_STRLEN
:
6165 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6170 case BUILT_IN_STRCPY
:
6171 target
= expand_builtin_strcpy (exp
, target
);
6176 case BUILT_IN_STRNCPY
:
6177 target
= expand_builtin_strncpy (exp
, target
);
6182 case BUILT_IN_STPCPY
:
6183 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6188 case BUILT_IN_MEMCPY
:
6189 target
= expand_builtin_memcpy (exp
, target
);
6194 case BUILT_IN_MEMPCPY
:
6195 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6200 case BUILT_IN_MEMSET
:
6201 target
= expand_builtin_memset (exp
, target
, mode
);
6206 case BUILT_IN_BZERO
:
6207 target
= expand_builtin_bzero (exp
);
6212 case BUILT_IN_STRCMP
:
6213 target
= expand_builtin_strcmp (exp
, target
);
6218 case BUILT_IN_STRNCMP
:
6219 target
= expand_builtin_strncmp (exp
, target
, mode
);
6225 case BUILT_IN_MEMCMP
:
6226 case BUILT_IN_MEMCMP_EQ
:
6227 target
= expand_builtin_memcmp (exp
, target
, fcode
== BUILT_IN_MEMCMP_EQ
);
6230 if (fcode
== BUILT_IN_MEMCMP_EQ
)
6232 tree newdecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
6233 TREE_OPERAND (exp
, 1) = build_fold_addr_expr (newdecl
);
6237 case BUILT_IN_SETJMP
:
6238 /* This should have been lowered to the builtins below. */
6241 case BUILT_IN_SETJMP_SETUP
:
6242 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6243 and the receiver label. */
6244 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6246 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6247 VOIDmode
, EXPAND_NORMAL
);
6248 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6249 rtx_insn
*label_r
= label_rtx (label
);
6251 /* This is copied from the handling of non-local gotos. */
6252 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6253 nonlocal_goto_handler_labels
6254 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
6255 nonlocal_goto_handler_labels
);
6256 /* ??? Do not let expand_label treat us as such since we would
6257 not want to be both on the list of non-local labels and on
6258 the list of forced labels. */
6259 FORCED_LABEL (label
) = 0;
6264 case BUILT_IN_SETJMP_RECEIVER
:
6265 /* __builtin_setjmp_receiver is passed the receiver label. */
6266 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6268 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6269 rtx_insn
*label_r
= label_rtx (label
);
6271 expand_builtin_setjmp_receiver (label_r
);
6276 /* __builtin_longjmp is passed a pointer to an array of five words.
6277 It's similar to the C library longjmp function but works with
6278 __builtin_setjmp above. */
6279 case BUILT_IN_LONGJMP
:
6280 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6282 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6283 VOIDmode
, EXPAND_NORMAL
);
6284 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6286 if (value
!= const1_rtx
)
6288 error ("%<__builtin_longjmp%> second argument must be 1");
6292 expand_builtin_longjmp (buf_addr
, value
);
6297 case BUILT_IN_NONLOCAL_GOTO
:
6298 target
= expand_builtin_nonlocal_goto (exp
);
6303 /* This updates the setjmp buffer that is its argument with the value
6304 of the current stack pointer. */
6305 case BUILT_IN_UPDATE_SETJMP_BUF
:
6306 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6309 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6311 expand_builtin_update_setjmp_buf (buf_addr
);
6317 expand_builtin_trap ();
6320 case BUILT_IN_UNREACHABLE
:
6321 expand_builtin_unreachable ();
6324 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6325 case BUILT_IN_SIGNBITD32
:
6326 case BUILT_IN_SIGNBITD64
:
6327 case BUILT_IN_SIGNBITD128
:
6328 target
= expand_builtin_signbit (exp
, target
);
6333 /* Various hooks for the DWARF 2 __throw routine. */
6334 case BUILT_IN_UNWIND_INIT
:
6335 expand_builtin_unwind_init ();
6337 case BUILT_IN_DWARF_CFA
:
6338 return virtual_cfa_rtx
;
6339 #ifdef DWARF2_UNWIND_INFO
6340 case BUILT_IN_DWARF_SP_COLUMN
:
6341 return expand_builtin_dwarf_sp_column ();
6342 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6343 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6346 case BUILT_IN_FROB_RETURN_ADDR
:
6347 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6348 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6349 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6350 case BUILT_IN_EH_RETURN
:
6351 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6352 CALL_EXPR_ARG (exp
, 1));
6354 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6355 return expand_builtin_eh_return_data_regno (exp
);
6356 case BUILT_IN_EXTEND_POINTER
:
6357 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6358 case BUILT_IN_EH_POINTER
:
6359 return expand_builtin_eh_pointer (exp
);
6360 case BUILT_IN_EH_FILTER
:
6361 return expand_builtin_eh_filter (exp
);
6362 case BUILT_IN_EH_COPY_VALUES
:
6363 return expand_builtin_eh_copy_values (exp
);
6365 case BUILT_IN_VA_START
:
6366 return expand_builtin_va_start (exp
);
6367 case BUILT_IN_VA_END
:
6368 return expand_builtin_va_end (exp
);
6369 case BUILT_IN_VA_COPY
:
6370 return expand_builtin_va_copy (exp
);
6371 case BUILT_IN_EXPECT
:
6372 return expand_builtin_expect (exp
, target
);
6373 case BUILT_IN_ASSUME_ALIGNED
:
6374 return expand_builtin_assume_aligned (exp
, target
);
6375 case BUILT_IN_PREFETCH
:
6376 expand_builtin_prefetch (exp
);
6379 case BUILT_IN_INIT_TRAMPOLINE
:
6380 return expand_builtin_init_trampoline (exp
, true);
6381 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6382 return expand_builtin_init_trampoline (exp
, false);
6383 case BUILT_IN_ADJUST_TRAMPOLINE
:
6384 return expand_builtin_adjust_trampoline (exp
);
6386 case BUILT_IN_INIT_DESCRIPTOR
:
6387 return expand_builtin_init_descriptor (exp
);
6388 case BUILT_IN_ADJUST_DESCRIPTOR
:
6389 return expand_builtin_adjust_descriptor (exp
);
6392 case BUILT_IN_EXECL
:
6393 case BUILT_IN_EXECV
:
6394 case BUILT_IN_EXECLP
:
6395 case BUILT_IN_EXECLE
:
6396 case BUILT_IN_EXECVP
:
6397 case BUILT_IN_EXECVE
:
6398 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6403 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6404 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6405 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6406 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6407 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6408 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6409 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6414 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6415 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6416 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6417 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6418 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6419 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6420 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6425 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6426 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6427 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6428 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6429 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6430 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6431 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6436 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6437 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6438 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6439 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6440 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6441 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6442 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6447 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6448 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6449 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6450 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6451 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6452 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6453 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6458 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6459 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6460 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6461 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6462 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6463 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6464 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6469 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6470 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6471 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6472 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6473 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6474 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6475 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6480 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6481 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6482 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6483 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6484 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6485 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6486 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6491 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6492 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6493 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6494 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6495 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6496 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6497 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6502 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6503 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6504 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6505 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6506 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6507 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6508 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6513 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6514 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6515 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6516 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6517 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6518 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6519 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6524 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6525 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6526 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6527 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6528 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6529 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6530 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6535 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6536 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6537 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6538 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6539 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6540 if (mode
== VOIDmode
)
6541 mode
= TYPE_MODE (boolean_type_node
);
6542 if (!target
|| !register_operand (target
, mode
))
6543 target
= gen_reg_rtx (mode
);
6545 mode
= get_builtin_sync_mode
6546 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6547 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6552 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6553 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6554 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6555 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6556 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6557 mode
= get_builtin_sync_mode
6558 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6559 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6564 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6565 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6566 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6567 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6568 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6569 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6570 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6575 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6576 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6577 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6578 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6579 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6580 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6581 expand_builtin_sync_lock_release (mode
, exp
);
6584 case BUILT_IN_SYNC_SYNCHRONIZE
:
6585 expand_builtin_sync_synchronize ();
6588 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6589 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6590 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6591 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6592 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6593 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6594 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6599 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6600 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6601 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6602 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6603 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6605 unsigned int nargs
, z
;
6606 vec
<tree
, va_gc
> *vec
;
6609 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6610 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6614 /* If this is turned into an external library call, the weak parameter
6615 must be dropped to match the expected parameter list. */
6616 nargs
= call_expr_nargs (exp
);
6617 vec_alloc (vec
, nargs
- 1);
6618 for (z
= 0; z
< 3; z
++)
6619 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6620 /* Skip the boolean weak parameter. */
6621 for (z
= 4; z
< 6; z
++)
6622 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6623 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6627 case BUILT_IN_ATOMIC_LOAD_1
:
6628 case BUILT_IN_ATOMIC_LOAD_2
:
6629 case BUILT_IN_ATOMIC_LOAD_4
:
6630 case BUILT_IN_ATOMIC_LOAD_8
:
6631 case BUILT_IN_ATOMIC_LOAD_16
:
6632 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6633 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6638 case BUILT_IN_ATOMIC_STORE_1
:
6639 case BUILT_IN_ATOMIC_STORE_2
:
6640 case BUILT_IN_ATOMIC_STORE_4
:
6641 case BUILT_IN_ATOMIC_STORE_8
:
6642 case BUILT_IN_ATOMIC_STORE_16
:
6643 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6644 target
= expand_builtin_atomic_store (mode
, exp
);
6649 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6650 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6651 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6652 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6653 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6655 enum built_in_function lib
;
6656 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6657 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6658 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6659 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6665 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6666 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6667 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6668 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6669 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6671 enum built_in_function lib
;
6672 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6673 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6674 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6675 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6681 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6682 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6683 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6684 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6685 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6687 enum built_in_function lib
;
6688 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6689 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6690 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6691 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6697 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6698 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6699 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6700 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6701 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6703 enum built_in_function lib
;
6704 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6705 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6706 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6707 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6713 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6714 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6715 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6716 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6717 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6719 enum built_in_function lib
;
6720 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6721 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6722 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6723 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6729 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6730 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6731 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6732 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6733 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6735 enum built_in_function lib
;
6736 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6737 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6738 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6739 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6745 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6746 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6747 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6748 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6749 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6750 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6751 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6752 ignore
, BUILT_IN_NONE
);
6757 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6758 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6759 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6760 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6761 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6762 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6763 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6764 ignore
, BUILT_IN_NONE
);
6769 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6770 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6771 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6772 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6773 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6774 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6775 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6776 ignore
, BUILT_IN_NONE
);
6781 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6782 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6783 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6784 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6785 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6786 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6787 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6788 ignore
, BUILT_IN_NONE
);
6793 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6794 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6795 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6796 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6797 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6798 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6799 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6800 ignore
, BUILT_IN_NONE
);
6805 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6806 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6807 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6808 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6809 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6810 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6811 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6812 ignore
, BUILT_IN_NONE
);
6817 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6818 return expand_builtin_atomic_test_and_set (exp
, target
);
6820 case BUILT_IN_ATOMIC_CLEAR
:
6821 return expand_builtin_atomic_clear (exp
);
6823 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6824 return expand_builtin_atomic_always_lock_free (exp
);
6826 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6827 target
= expand_builtin_atomic_is_lock_free (exp
);
6832 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6833 expand_builtin_atomic_thread_fence (exp
);
6836 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6837 expand_builtin_atomic_signal_fence (exp
);
6840 case BUILT_IN_OBJECT_SIZE
:
6841 return expand_builtin_object_size (exp
);
6843 case BUILT_IN_MEMCPY_CHK
:
6844 case BUILT_IN_MEMPCPY_CHK
:
6845 case BUILT_IN_MEMMOVE_CHK
:
6846 case BUILT_IN_MEMSET_CHK
:
6847 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6852 case BUILT_IN_STRCPY_CHK
:
6853 case BUILT_IN_STPCPY_CHK
:
6854 case BUILT_IN_STRNCPY_CHK
:
6855 case BUILT_IN_STPNCPY_CHK
:
6856 case BUILT_IN_STRCAT_CHK
:
6857 case BUILT_IN_STRNCAT_CHK
:
6858 case BUILT_IN_SNPRINTF_CHK
:
6859 case BUILT_IN_VSNPRINTF_CHK
:
6860 maybe_emit_chk_warning (exp
, fcode
);
6863 case BUILT_IN_SPRINTF_CHK
:
6864 case BUILT_IN_VSPRINTF_CHK
:
6865 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6869 if (warn_free_nonheap_object
)
6870 maybe_emit_free_warning (exp
);
6873 case BUILT_IN_THREAD_POINTER
:
6874 return expand_builtin_thread_pointer (exp
, target
);
6876 case BUILT_IN_SET_THREAD_POINTER
:
6877 expand_builtin_set_thread_pointer (exp
);
6880 case BUILT_IN_CILK_DETACH
:
6881 expand_builtin_cilk_detach (exp
);
6884 case BUILT_IN_CILK_POP_FRAME
:
6885 expand_builtin_cilk_pop_frame (exp
);
6888 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
6889 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
6890 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
6891 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
6892 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
6893 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
6894 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
6895 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
6896 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
6897 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
6898 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
6899 /* We allow user CHKP builtins if Pointer Bounds
6901 if (!chkp_function_instrumented_p (current_function_decl
))
6903 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
6904 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6905 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
6906 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
6907 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
6908 return expand_normal (CALL_EXPR_ARG (exp
, 0));
6909 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
6910 return expand_normal (size_zero_node
);
6911 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
6912 return expand_normal (size_int (-1));
6918 case BUILT_IN_CHKP_BNDMK
:
6919 case BUILT_IN_CHKP_BNDSTX
:
6920 case BUILT_IN_CHKP_BNDCL
:
6921 case BUILT_IN_CHKP_BNDCU
:
6922 case BUILT_IN_CHKP_BNDLDX
:
6923 case BUILT_IN_CHKP_BNDRET
:
6924 case BUILT_IN_CHKP_INTERSECT
:
6925 case BUILT_IN_CHKP_NARROW
:
6926 case BUILT_IN_CHKP_EXTRACT_LOWER
:
6927 case BUILT_IN_CHKP_EXTRACT_UPPER
:
6928 /* Software implementation of Pointer Bounds Checker is NYI.
6929 Target support is required. */
6930 error ("Your target platform does not support -fcheck-pointer-bounds");
6933 case BUILT_IN_ACC_ON_DEVICE
:
6934 /* Do library call, if we failed to expand the builtin when
6938 default: /* just do library call, if unknown builtin */
6942 /* The switch statement above can drop through to cause the function
6943 to be called normally. */
6944 return expand_call (exp
, target
, ignore
);
/* Similar to expand_builtin but is used for instrumented calls.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
/* Create builtin_expect with PRED and EXPECTED as its arguments and
   return it as a truthvalue.  */

static tree
build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
				tree predictor)
{
  tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;

  fn = builtin_decl_explicit (BUILT_IN_EXPECT);
  arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
  ret_type = TREE_TYPE (TREE_TYPE (fn));
  pred_type = TREE_VALUE (arg_types);
  expected_type = TREE_VALUE (TREE_CHAIN (arg_types));

  pred = fold_convert_loc (loc, pred_type, pred);
  expected = fold_convert_loc (loc, expected_type, expected);
  call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
				   predictor);

  return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
		 build_int_cst (ret_type, 0));
}
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
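/* Illustrative sketch (not from the GCC sources): the distribution above
   rewrites

	__builtin_expect (a && b, 1)

   into roughly

	(__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each short-circuit arm carries its own prediction, which is what
   build_builtin_expect_predicate constructs.  */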
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
7255 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7256 NULL_TREE if no simplification can be made. */
7259 fold_builtin_sincos (location_t loc
,
7260 tree arg0
, tree arg1
, tree arg2
)
7263 tree fndecl
, call
= NULL_TREE
;
7265 if (!validate_arg (arg0
, REAL_TYPE
)
7266 || !validate_arg (arg1
, POINTER_TYPE
)
7267 || !validate_arg (arg2
, POINTER_TYPE
))
7270 type
= TREE_TYPE (arg0
);
7272 /* Calculate the result when the argument is a constant. */
7273 built_in_function fn
= mathfn_built_in_2 (type
, CFN_BUILT_IN_CEXPI
);
7274 if (fn
== END_BUILTINS
)
7277 /* Canonicalize sincos to cexpi. */
7278 if (TREE_CODE (arg0
) == REAL_CST
)
7280 tree complex_type
= build_complex_type (type
);
7281 call
= fold_const_call (as_combined_fn (fn
), complex_type
, arg0
);
7285 if (!targetm
.libc_has_function (function_c99_math_complex
)
7286 || !builtin_decl_implicit_p (fn
))
7288 fndecl
= builtin_decl_explicit (fn
);
7289 call
= build_call_expr_loc (loc
, fndecl
, 1, arg0
);
7290 call
= builtin_save_expr (call
);
7293 return build2 (COMPOUND_EXPR
, void_type_node
,
7294 build2 (MODIFY_EXPR
, void_type_node
,
7295 build_fold_indirect_ref_loc (loc
, arg1
),
7296 fold_build1_loc (loc
, IMAGPART_EXPR
, type
, call
)),
7297 build2 (MODIFY_EXPR
, void_type_node
,
7298 build_fold_indirect_ref_loc (loc
, arg2
),
7299 fold_build1_loc (loc
, REALPART_EXPR
, type
, call
)));
7302 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7303 Return NULL_TREE if no simplification can be made. */
7306 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
7308 if (!validate_arg (arg1
, POINTER_TYPE
)
7309 || !validate_arg (arg2
, POINTER_TYPE
)
7310 || !validate_arg (len
, INTEGER_TYPE
))
7313 /* If the LEN parameter is zero, return zero. */
7314 if (integer_zerop (len
))
7315 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
7318 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7319 if (operand_equal_p (arg1
, arg2
, 0))
7320 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
7322 /* If len parameter is one, return an expression corresponding to
7323 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7324 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
7326 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7327 tree cst_uchar_ptr_node
7328 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7331 = fold_convert_loc (loc
, integer_type_node
,
7332 build1 (INDIRECT_REF
, cst_uchar_node
,
7333 fold_convert_loc (loc
,
7337 = fold_convert_loc (loc
, integer_type_node
,
7338 build1 (INDIRECT_REF
, cst_uchar_node
,
7339 fold_convert_loc (loc
,
7342 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}
/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  /* ??? Only expand to FMA_EXPR if it's directly supported.  */
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE)
      && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
    return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
7466 /* Fold a call to builtin frexp, we can assume the base is 2. */
7469 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
7471 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
7476 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
7479 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
7481 /* Proceed if a valid pointer type was passed in. */
7482 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
7484 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
7490 /* For +-0, return (*exp = 0, +-0). */
7491 exp
= integer_zero_node
;
7496 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7497 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
7500 /* Since the frexp function always expects base 2, and in
7501 GCC normalized significands are already in the range
7502 [0.5, 1.0), we have exactly what frexp wants. */
7503 REAL_VALUE_TYPE frac_rvt
= *value
;
7504 SET_REAL_EXP (&frac_rvt
, 0);
7505 frac
= build_real (rettype
, frac_rvt
);
7506 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
7513 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7514 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
7515 TREE_SIDE_EFFECTS (arg1
) = 1;
7516 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
7522 /* Fold a call to builtin modf. */
7525 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
7527 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
7532 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
7535 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
7537 /* Proceed if a valid pointer type was passed in. */
7538 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
7540 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
7541 REAL_VALUE_TYPE trunc
, frac
;
7547 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7548 trunc
= frac
= *value
;
7551 /* For +-Inf, return (*arg1 = arg0, +-0). */
7553 frac
.sign
= value
->sign
;
7557 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7558 real_trunc (&trunc
, VOIDmode
, value
);
7559 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
7560 /* If the original number was negative and already
7561 integral, then the fractional part is -0.0. */
7562 if (value
->sign
&& frac
.cl
== rvc_zero
)
7563 frac
.sign
= value
->sign
;
7567 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7568 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
7569 build_real (rettype
, trunc
));
7570 TREE_SIDE_EFFECTS (arg1
) = 1;
7571 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
7572 build_real (rettype
, frac
));
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */
7584 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
7588 if (!validate_arg (arg
, REAL_TYPE
))
7591 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
7594 mode
= TYPE_MODE (TREE_TYPE (arg
));
7596 bool is_ibm_extended
= MODE_COMPOSITE_P (mode
);
7598 /* If there is no optab, try generic code. */
7599 switch (DECL_FUNCTION_CODE (fndecl
))
7603 CASE_FLT_FN (BUILT_IN_ISINF
):
7605 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7606 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
7607 tree type
= TREE_TYPE (arg
);
7611 if (is_ibm_extended
)
7613 /* NaN and Inf are encoded in the high-order double value
7614 only. The low-order value is not significant. */
7615 type
= double_type_node
;
7617 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
7619 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
7620 real_from_string (&r
, buf
);
7621 result
= build_call_expr (isgr_fn
, 2,
7622 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
7623 build_real (type
, r
));
7626 CASE_FLT_FN (BUILT_IN_FINITE
):
7627 case BUILT_IN_ISFINITE
:
7629 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7630 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
7631 tree type
= TREE_TYPE (arg
);
7635 if (is_ibm_extended
)
7637 /* NaN and Inf are encoded in the high-order double value
7638 only. The low-order value is not significant. */
7639 type
= double_type_node
;
7641 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
7643 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
7644 real_from_string (&r
, buf
);
7645 result
= build_call_expr (isle_fn
, 2,
7646 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
7647 build_real (type
, r
));
7648 /*result = fold_build2_loc (loc, UNGT_EXPR,
7649 TREE_TYPE (TREE_TYPE (fndecl)),
7650 fold_build1_loc (loc, ABS_EXPR, type, arg),
7651 build_real (type, r));
7652 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7653 TREE_TYPE (TREE_TYPE (fndecl)),
7657 case BUILT_IN_ISNORMAL
:
7659 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7660 islessequal(fabs(x),DBL_MAX). */
7661 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
7662 tree type
= TREE_TYPE (arg
);
7663 tree orig_arg
, max_exp
, min_exp
;
7664 machine_mode orig_mode
= mode
;
7665 REAL_VALUE_TYPE rmax
, rmin
;
7668 orig_arg
= arg
= builtin_save_expr (arg
);
7669 if (is_ibm_extended
)
7671 /* Use double to test the normal range of IBM extended
7672 precision. Emin for IBM extended precision is
7673 different to emin for IEEE double, being 53 higher
7674 since the low double exponent is at least 53 lower
7675 than the high double exponent. */
7676 type
= double_type_node
;
7678 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
7680 arg
= fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
7682 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
7683 real_from_string (&rmax
, buf
);
7684 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (orig_mode
)->emin
- 1);
7685 real_from_string (&rmin
, buf
);
7686 max_exp
= build_real (type
, rmax
);
7687 min_exp
= build_real (type
, rmin
);
7689 max_exp
= build_call_expr (isle_fn
, 2, arg
, max_exp
);
7690 if (is_ibm_extended
)
7692 /* Testing the high end of the range is done just using
7693 the high double, using the same test as isfinite().
7694 For the subnormal end of the range we first test the
7695 high double, then if its magnitude is equal to the
7696 limit of 0x1p-969, we test whether the low double is
7697 non-zero and opposite sign to the high double. */
7698 tree
const islt_fn
= builtin_decl_explicit (BUILT_IN_ISLESS
);
7699 tree
const isgt_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
7700 tree gt_min
= build_call_expr (isgt_fn
, 2, arg
, min_exp
);
7701 tree eq_min
= fold_build2 (EQ_EXPR
, integer_type_node
,
7703 tree as_complex
= build1 (VIEW_CONVERT_EXPR
,
7704 complex_double_type_node
, orig_arg
);
7705 tree hi_dbl
= build1 (REALPART_EXPR
, type
, as_complex
);
7706 tree lo_dbl
= build1 (IMAGPART_EXPR
, type
, as_complex
);
7707 tree zero
= build_real (type
, dconst0
);
7708 tree hilt
= build_call_expr (islt_fn
, 2, hi_dbl
, zero
);
7709 tree lolt
= build_call_expr (islt_fn
, 2, lo_dbl
, zero
);
7710 tree logt
= build_call_expr (isgt_fn
, 2, lo_dbl
, zero
);
7711 tree ok_lo
= fold_build1 (TRUTH_NOT_EXPR
, integer_type_node
,
7712 fold_build3 (COND_EXPR
,
7715 eq_min
= fold_build2 (TRUTH_ANDIF_EXPR
, integer_type_node
,
7717 min_exp
= fold_build2 (TRUTH_ORIF_EXPR
, integer_type_node
,
7723 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
7724 min_exp
= build_call_expr (isge_fn
, 2, arg
, min_exp
);
7726 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
,
7737 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
7738 ARG is the argument for the call. */
7741 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
7743 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7745 if (!validate_arg (arg
, REAL_TYPE
))
7748 switch (builtin_index
)
7750 case BUILT_IN_ISINF
:
7751 if (!HONOR_INFINITIES (arg
))
7752 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
7756 case BUILT_IN_ISINF_SIGN
:
7758 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7759 /* In a boolean context, GCC will fold the inner COND_EXPR to
7760 1. So e.g. "if (isinf_sign(x))" would be folded to just
7761 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7762 tree signbit_fn
= builtin_decl_explicit (BUILT_IN_SIGNBIT
);
7763 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
7764 tree tmp
= NULL_TREE
;
7766 arg
= builtin_save_expr (arg
);
7768 if (signbit_fn
&& isinf_fn
)
7770 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
7771 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
7773 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
7774 signbit_call
, integer_zero_node
);
7775 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
7776 isinf_call
, integer_zero_node
);
7778 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
7779 integer_minus_one_node
, integer_one_node
);
7780 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
7788 case BUILT_IN_ISFINITE
:
7789 if (!HONOR_NANS (arg
)
7790 && !HONOR_INFINITIES (arg
))
7791 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
7795 case BUILT_IN_ISNAN
:
7796 if (!HONOR_NANS (arg
))
7797 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
7800 bool is_ibm_extended
= MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg
)));
7801 if (is_ibm_extended
)
7803 /* NaN and Inf are encoded in the high-order double value
7804 only. The low-order value is not significant. */
7805 arg
= fold_build1_loc (loc
, NOP_EXPR
, double_type_node
, arg
);
7808 arg
= builtin_save_expr (arg
);
7809 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
7816 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7817 This builtin will generate code to return the appropriate floating
7818 point classification depending on the value of the floating point
7819 number passed in. The possible return values must be supplied as
7820 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic". */
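/* Illustrative sketch (not part of GCC): this is the builtin that a
   typical <math.h> uses to implement the type-generic fpclassify()
   macro, roughly as

     #define fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
			     FP_SUBNORMAL, FP_ZERO, (x))

   matching the argument order documented above; the exact macro text is
   a property of the C library, not of GCC.  */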
7825 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
7827 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
7828 arg
, type
, res
, tmp
;
7833 /* Verify the required arguments in the original call. */
7835 || !validate_arg (args
[0], INTEGER_TYPE
)
7836 || !validate_arg (args
[1], INTEGER_TYPE
)
7837 || !validate_arg (args
[2], INTEGER_TYPE
)
7838 || !validate_arg (args
[3], INTEGER_TYPE
)
7839 || !validate_arg (args
[4], INTEGER_TYPE
)
7840 || !validate_arg (args
[5], REAL_TYPE
))
7844 fp_infinite
= args
[1];
7845 fp_normal
= args
[2];
7846 fp_subnormal
= args
[3];
7849 type
= TREE_TYPE (arg
);
7850 mode
= TYPE_MODE (type
);
7851 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
7855 (fabs(x) == Inf ? FP_INFINITE :
7856 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7857 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7859 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
7860 build_real (type
, dconst0
));
7861 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
7862 tmp
, fp_zero
, fp_subnormal
);
7864 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
7865 real_from_string (&r
, buf
);
7866 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
7867 arg
, build_real (type
, r
));
7868 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
7870 if (HONOR_INFINITIES (mode
))
7873 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
7874 build_real (type
, r
));
7875 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
7879 if (HONOR_NANS (mode
))
7881 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
7882 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
7888 /* Fold a call to an unordered comparison function such as
7889 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
7890 being called and ARG0 and ARG1 are the arguments for the call.
7891 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7892 the opposite of the desired result. UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for the rest.  */
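/* Illustrative sketch (not part of GCC): when NaNs are honored,

     __builtin_isgreater (x, y)

   is folded below into the negation of the corresponding "unordered"
   comparison, conceptually !(x UNLE y), so that no invalid-operation
   exception is raised on quiet NaN operands; when NaNs cannot occur,
   the plain ordered comparison is used instead.  */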
7897 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
7898 enum tree_code unordered_code
,
7899 enum tree_code ordered_code
)
7901 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7902 enum tree_code code
;
7904 enum tree_code code0
, code1
;
7905 tree cmp_type
= NULL_TREE
;
7907 type0
= TREE_TYPE (arg0
);
7908 type1
= TREE_TYPE (arg1
);
7910 code0
= TREE_CODE (type0
);
7911 code1
= TREE_CODE (type1
);
7913 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
7914 /* Choose the wider of two real types. */
7915 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
7917 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
7919 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
7922 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
7923 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
7925 if (unordered_code
== UNORDERED_EXPR
)
7927 if (!HONOR_NANS (arg0
))
7928 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
7929 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
7932 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
7933 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
7934 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
7937 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
7938 arithmetics if it can never overflow, or into internal functions that
7939 return both result of arithmetics and overflowed boolean flag in
7940 a complex integer result, or some other check for overflow.
7941 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
7942 checking part of that. */
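/* Illustrative sketch (not part of GCC): for user code such as

     int r;
     bool ovf = __builtin_add_overflow (a, b, &r);

   the folding below produces, unless everything is constant, roughly

     tmp = IFN_ADD_OVERFLOW (a, b);     (complex integer result)
     r   = REALPART_EXPR <tmp>;         (the arithmetic result)
     ovf = (bool) IMAGPART_EXPR <tmp>;  (the overflow flag)

   while the _overflow_p variants keep only the flag part.  */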
7945 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
7946 tree arg0
, tree arg1
, tree arg2
)
7948 enum internal_fn ifn
= IFN_LAST
;
7949 /* The code of the expression corresponding to the type-generic
7950 built-in, or ERROR_MARK for the type-specific ones. */
7951 enum tree_code opcode
= ERROR_MARK
;
7952 bool ovf_only
= false;
7956 case BUILT_IN_ADD_OVERFLOW_P
:
7959 case BUILT_IN_ADD_OVERFLOW
:
7962 case BUILT_IN_SADD_OVERFLOW
:
7963 case BUILT_IN_SADDL_OVERFLOW
:
7964 case BUILT_IN_SADDLL_OVERFLOW
:
7965 case BUILT_IN_UADD_OVERFLOW
:
7966 case BUILT_IN_UADDL_OVERFLOW
:
7967 case BUILT_IN_UADDLL_OVERFLOW
:
7968 ifn
= IFN_ADD_OVERFLOW
;
7970 case BUILT_IN_SUB_OVERFLOW_P
:
7973 case BUILT_IN_SUB_OVERFLOW
:
7974 opcode
= MINUS_EXPR
;
7976 case BUILT_IN_SSUB_OVERFLOW
:
7977 case BUILT_IN_SSUBL_OVERFLOW
:
7978 case BUILT_IN_SSUBLL_OVERFLOW
:
7979 case BUILT_IN_USUB_OVERFLOW
:
7980 case BUILT_IN_USUBL_OVERFLOW
:
7981 case BUILT_IN_USUBLL_OVERFLOW
:
7982 ifn
= IFN_SUB_OVERFLOW
;
7984 case BUILT_IN_MUL_OVERFLOW_P
:
7987 case BUILT_IN_MUL_OVERFLOW
:
7990 case BUILT_IN_SMUL_OVERFLOW
:
7991 case BUILT_IN_SMULL_OVERFLOW
:
7992 case BUILT_IN_SMULLL_OVERFLOW
:
7993 case BUILT_IN_UMUL_OVERFLOW
:
7994 case BUILT_IN_UMULL_OVERFLOW
:
7995 case BUILT_IN_UMULLL_OVERFLOW
:
7996 ifn
= IFN_MUL_OVERFLOW
;
8002 /* For the "generic" overloads, the first two arguments can have different
8003 types and the last argument determines the target type to use to check
8004 for overflow. The arguments of the other overloads all have the same
8006 tree type
= ovf_only
? TREE_TYPE (arg2
) : TREE_TYPE (TREE_TYPE (arg2
));
8008 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8009 arguments are constant, attempt to fold the built-in call into a constant
8010 expression indicating whether or not it detected an overflow. */
8012 && TREE_CODE (arg0
) == INTEGER_CST
8013 && TREE_CODE (arg1
) == INTEGER_CST
)
8014 /* Perform the computation in the target type and check for overflow. */
8015 return omit_one_operand_loc (loc
, boolean_type_node
,
8016 arith_overflowed_p (opcode
, type
, arg0
, arg1
)
8017 ? boolean_true_node
: boolean_false_node
,
8020 tree ctype
= build_complex_type (type
);
8021 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
8023 tree tgt
= save_expr (call
);
8024 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
8025 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
8026 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
8029 return omit_one_operand_loc (loc
, boolean_type_node
, ovfres
, arg2
);
8031 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
8033 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
8034 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    return build_string_literal (strlen (fname) + 1, fname);

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  if (current_function_decl)
    {
      const char *name
	= IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
      return build_string_literal (strlen (name) + 1, name);
    }

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}
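/* Illustrative sketch (not part of GCC): the three folds above turn

     printf ("%s:%d in %s\n", __builtin_FILE (), __builtin_LINE (),
	     __builtin_FUNCTION ());

   into string and integer constants describing the location where the
   built-ins themselves were written, e.g. when used as C++ default
   arguments.  */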
8070 /* Fold a call to built-in function FNDECL with 0 arguments.
8071 This function returns NULL_TREE if no simplification was possible. */
8074 fold_builtin_0 (location_t loc
, tree fndecl
)
8076 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8077 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8081 return fold_builtin_FILE (loc
);
8083 case BUILT_IN_FUNCTION
:
8084 return fold_builtin_FUNCTION ();
8087 return fold_builtin_LINE (loc
, type
);
8089 CASE_FLT_FN (BUILT_IN_INF
):
8090 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF
):
8091 case BUILT_IN_INFD32
:
8092 case BUILT_IN_INFD64
:
8093 case BUILT_IN_INFD128
:
8094 return fold_builtin_inf (loc
, type
, true);
8096 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
8097 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL
):
8098 return fold_builtin_inf (loc
, type
, false);
8100 case BUILT_IN_CLASSIFY_TYPE
:
8101 return fold_builtin_classify_type (NULL_TREE
);
8109 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8110 This function returns NULL_TREE if no simplification was possible. */
8113 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
8115 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8116 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8118 if (TREE_CODE (arg0
) == ERROR_MARK
)
8121 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
))
8126 case BUILT_IN_CONSTANT_P
:
8128 tree val
= fold_builtin_constant_p (arg0
);
8130 /* Gimplification will pull the CALL_EXPR for the builtin out of
8131 an if condition. When not optimizing, we'll not CSE it back.
8132 To avoid link error types of regressions, return false now. */
8133 if (!val
&& !optimize
)
8134 val
= integer_zero_node
;
8139 case BUILT_IN_CLASSIFY_TYPE
:
8140 return fold_builtin_classify_type (arg0
);
8142 case BUILT_IN_STRLEN
:
8143 return fold_builtin_strlen (loc
, type
, arg0
);
8145 CASE_FLT_FN (BUILT_IN_FABS
):
8146 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
8147 case BUILT_IN_FABSD32
:
8148 case BUILT_IN_FABSD64
:
8149 case BUILT_IN_FABSD128
:
8150 return fold_builtin_fabs (loc
, arg0
, type
);
8154 case BUILT_IN_LLABS
:
8155 case BUILT_IN_IMAXABS
:
8156 return fold_builtin_abs (loc
, arg0
, type
);
8158 CASE_FLT_FN (BUILT_IN_CONJ
):
8159 if (validate_arg (arg0
, COMPLEX_TYPE
)
8160 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8161 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
8164 CASE_FLT_FN (BUILT_IN_CREAL
):
8165 if (validate_arg (arg0
, COMPLEX_TYPE
)
8166 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8167 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
8170 CASE_FLT_FN (BUILT_IN_CIMAG
):
8171 if (validate_arg (arg0
, COMPLEX_TYPE
)
8172 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8173 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
8176 CASE_FLT_FN (BUILT_IN_CARG
):
8177 return fold_builtin_carg (loc
, arg0
, type
);
8179 case BUILT_IN_ISASCII
:
8180 return fold_builtin_isascii (loc
, arg0
);
8182 case BUILT_IN_TOASCII
:
8183 return fold_builtin_toascii (loc
, arg0
);
8185 case BUILT_IN_ISDIGIT
:
8186 return fold_builtin_isdigit (loc
, arg0
);
8188 CASE_FLT_FN (BUILT_IN_FINITE
):
8189 case BUILT_IN_FINITED32
:
8190 case BUILT_IN_FINITED64
:
8191 case BUILT_IN_FINITED128
:
8192 case BUILT_IN_ISFINITE
:
8194 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
8197 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8200 CASE_FLT_FN (BUILT_IN_ISINF
):
8201 case BUILT_IN_ISINFD32
:
8202 case BUILT_IN_ISINFD64
:
8203 case BUILT_IN_ISINFD128
:
8205 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
8208 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8211 case BUILT_IN_ISNORMAL
:
8212 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8214 case BUILT_IN_ISINF_SIGN
:
8215 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
8217 CASE_FLT_FN (BUILT_IN_ISNAN
):
8218 case BUILT_IN_ISNAND32
:
8219 case BUILT_IN_ISNAND64
:
8220 case BUILT_IN_ISNAND128
:
8221 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
8224 if (integer_zerop (arg0
))
8225 return build_empty_stmt (loc
);
8236 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8237 This function returns NULL_TREE if no simplification was possible. */
8240 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
8242 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8243 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8245 if (TREE_CODE (arg0
) == ERROR_MARK
8246 || TREE_CODE (arg1
) == ERROR_MARK
)
8249 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
, arg1
))
8254 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
8255 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
8256 if (validate_arg (arg0
, REAL_TYPE
)
8257 && validate_arg (arg1
, POINTER_TYPE
))
8258 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
8261 CASE_FLT_FN (BUILT_IN_FREXP
):
8262 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
8264 CASE_FLT_FN (BUILT_IN_MODF
):
8265 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
8267 case BUILT_IN_STRSTR
:
8268 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
8270 case BUILT_IN_STRSPN
:
8271 return fold_builtin_strspn (loc
, arg0
, arg1
);
8273 case BUILT_IN_STRCSPN
:
8274 return fold_builtin_strcspn (loc
, arg0
, arg1
);
8276 case BUILT_IN_STRPBRK
:
8277 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
8279 case BUILT_IN_EXPECT
:
8280 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
8282 case BUILT_IN_ISGREATER
:
8283 return fold_builtin_unordered_cmp (loc
, fndecl
,
8284 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
8285 case BUILT_IN_ISGREATEREQUAL
:
8286 return fold_builtin_unordered_cmp (loc
, fndecl
,
8287 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
8288 case BUILT_IN_ISLESS
:
8289 return fold_builtin_unordered_cmp (loc
, fndecl
,
8290 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
8291 case BUILT_IN_ISLESSEQUAL
:
8292 return fold_builtin_unordered_cmp (loc
, fndecl
,
8293 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
8294 case BUILT_IN_ISLESSGREATER
:
8295 return fold_builtin_unordered_cmp (loc
, fndecl
,
8296 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
8297 case BUILT_IN_ISUNORDERED
:
8298 return fold_builtin_unordered_cmp (loc
, fndecl
,
8299 arg0
, arg1
, UNORDERED_EXPR
,
8302 /* We do the folding for va_start in the expander. */
8303 case BUILT_IN_VA_START
:
8306 case BUILT_IN_OBJECT_SIZE
:
8307 return fold_builtin_object_size (arg0
, arg1
);
8309 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
8310 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
8312 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
8313 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
8321 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8323 This function returns NULL_TREE if no simplification was possible. */
8326 fold_builtin_3 (location_t loc
, tree fndecl
,
8327 tree arg0
, tree arg1
, tree arg2
)
8329 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8330 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8332 if (TREE_CODE (arg0
) == ERROR_MARK
8333 || TREE_CODE (arg1
) == ERROR_MARK
8334 || TREE_CODE (arg2
) == ERROR_MARK
)
8337 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
,
8344 CASE_FLT_FN (BUILT_IN_SINCOS
):
8345 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
8347 CASE_FLT_FN (BUILT_IN_FMA
):
8348 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
8350 CASE_FLT_FN (BUILT_IN_REMQUO
):
8351 if (validate_arg (arg0
, REAL_TYPE
)
8352 && validate_arg (arg1
, REAL_TYPE
)
8353 && validate_arg (arg2
, POINTER_TYPE
))
8354 return do_mpfr_remquo (arg0
, arg1
, arg2
);
8358 case BUILT_IN_MEMCMP
:
8359 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);
8361 case BUILT_IN_EXPECT
:
8362 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
);
8364 case BUILT_IN_ADD_OVERFLOW
:
8365 case BUILT_IN_SUB_OVERFLOW
:
8366 case BUILT_IN_MUL_OVERFLOW
:
8367 case BUILT_IN_ADD_OVERFLOW_P
:
8368 case BUILT_IN_SUB_OVERFLOW_P
:
8369 case BUILT_IN_MUL_OVERFLOW_P
:
8370 case BUILT_IN_SADD_OVERFLOW
:
8371 case BUILT_IN_SADDL_OVERFLOW
:
8372 case BUILT_IN_SADDLL_OVERFLOW
:
8373 case BUILT_IN_SSUB_OVERFLOW
:
8374 case BUILT_IN_SSUBL_OVERFLOW
:
8375 case BUILT_IN_SSUBLL_OVERFLOW
:
8376 case BUILT_IN_SMUL_OVERFLOW
:
8377 case BUILT_IN_SMULL_OVERFLOW
:
8378 case BUILT_IN_SMULLL_OVERFLOW
:
8379 case BUILT_IN_UADD_OVERFLOW
:
8380 case BUILT_IN_UADDL_OVERFLOW
:
8381 case BUILT_IN_UADDLL_OVERFLOW
:
8382 case BUILT_IN_USUB_OVERFLOW
:
8383 case BUILT_IN_USUBL_OVERFLOW
:
8384 case BUILT_IN_USUBLL_OVERFLOW
:
8385 case BUILT_IN_UMUL_OVERFLOW
:
8386 case BUILT_IN_UMULL_OVERFLOW
:
8387 case BUILT_IN_UMULLL_OVERFLOW
:
8388 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
8396 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8397 arguments. IGNORE is true if the result of the
8398 function call is ignored. This function returns NULL_TREE if no
8399 simplification was possible. */
8402 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool)
8404 tree ret
= NULL_TREE
;
8409 ret
= fold_builtin_0 (loc
, fndecl
);
8412 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
8415 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1]);
8418 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
8421 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
8426 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
8427 SET_EXPR_LOCATION (ret
, loc
);
8428 TREE_NO_WARNING (ret
) = 1;
8434 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8435 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8436 of arguments in ARGS to be omitted. OLDNARGS is the number of
8437 elements in ARGS. */
8440 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
8441 int skip
, tree fndecl
, int n
, va_list newargs
)
8443 int nargs
= oldnargs
- skip
+ n
;
8450 buffer
= XALLOCAVEC (tree
, nargs
);
8451 for (i
= 0; i
< n
; i
++)
8452 buffer
[i
] = va_arg (newargs
, tree
);
8453 for (j
= skip
; j
< oldnargs
; j
++, i
++)
8454 buffer
[i
] = args
[j
];
8457 buffer
= args
+ skip
;
8459 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

static bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
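/* Illustrative sketch (not part of GCC): a fortified C library typically
   wraps string functions in always_inline wrappers along the lines of

     extern __inline __attribute__ ((__always_inline__)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n, __builtin_object_size (d, 0));
     }

   Folding the inner call before such a wrapper has been inlined would
   bypass the _chk checking, which is why folding is deferred above.  */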
8478 /* A wrapper function for builtin folding that prevents warnings for
8479 "statement without effect" and the like, caused by removing the
8480 call node earlier than the warning is generated. */
8483 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
8485 tree ret
= NULL_TREE
;
8486 tree fndecl
= get_callee_fndecl (exp
);
8488 && TREE_CODE (fndecl
) == FUNCTION_DECL
8489 && DECL_BUILT_IN (fndecl
)
8490 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8491 yet. Defer folding until we see all the arguments
8492 (after inlining). */
8493 && !CALL_EXPR_VA_ARG_PACK (exp
))
8495 int nargs
= call_expr_nargs (exp
);
8497 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8498 instead last argument is __builtin_va_arg_pack (). Defer folding
8499 even in that case, until arguments are finalized. */
8500 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
8502 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
8504 && TREE_CODE (fndecl2
) == FUNCTION_DECL
8505 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
8506 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
8510 if (avoid_folding_inline_builtin (fndecl
))
8513 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
8514 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
8515 CALL_EXPR_ARGP (exp
), ignore
);
8518 tree
*args
= CALL_EXPR_ARGP (exp
);
8519 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
8527 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8528 N arguments are passed in the array ARGARRAY. Return a folded
8529 expression or NULL_TREE if no simplification was possible. */
8532 fold_builtin_call_array (location_t loc
, tree
,
8537 if (TREE_CODE (fn
) != ADDR_EXPR
)
8540 tree fndecl
= TREE_OPERAND (fn
, 0);
8541 if (TREE_CODE (fndecl
) == FUNCTION_DECL
8542 && DECL_BUILT_IN (fndecl
))
8544 /* If last argument is __builtin_va_arg_pack (), arguments to this
8545 function are not finalized yet. Defer folding until they are. */
8546 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
8548 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
8550 && TREE_CODE (fndecl2
) == FUNCTION_DECL
8551 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
8552 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
8555 if (avoid_folding_inline_builtin (fndecl
))
8557 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
8558 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
8560 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
8566 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8567 along with N new arguments specified as the "..." parameters. SKIP
8568 is the number of arguments in EXP to be omitted. This function is used
8569 to do varargs-to-varargs transformations. */
8572 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
8578 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
8579 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
8600 /* This function validates the types of a function call argument list
8601 against a specified list of tree_codes. If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.
8605 This is the GIMPLE version of validate_arglist. Eventually we want to
8606 completely convert builtins.c to work from GIMPLEs and the tree based
8607 validate_arglist will then be removed. */
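/* Illustrative sketch (not part of GCC): a caller elsewhere in this file
   might check a memcpy-shaped call with

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
			      INTEGER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE is the end-of-list marker and a trailing
   0 would instead accept any further arguments.  */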
8610 validate_gimple_arglist (const gcall
*call
, ...)
8612 enum tree_code code
;
8618 va_start (ap
, call
);
8623 code
= (enum tree_code
) va_arg (ap
, int);
8627 /* This signifies an ellipses, any further arguments are all ok. */
8631 /* This signifies an endlink, if no arguments remain, return
8632 true, otherwise return false. */
8633 res
= (i
== gimple_call_num_args (call
));
8636 /* If no parameters remain or the parameter's code does not
8637 match the specified code, return false. Otherwise continue
8638 checking any remaining arguments. */
8639 arg
= gimple_call_arg (call
, i
++);
8640 if (!validate_arg (arg
, code
))
  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     handle).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
8693 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8694 to the call, and TYPE is its return type.
8696 Return NULL_TREE if no simplification was possible, otherwise return the
8697 simplified form of the call as a tree.
8699 The simplified form may be a constant or other expression which
8700 computes the same value, but in a more efficient manner (including
8701 calls to other builtin functions).
8703 The call may contain arguments which need to be evaluated, but
8704 which are not useful to determine the result of the call. In
8705 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8706 COMPOUND_EXPR will be an argument which must be evaluated.
8707 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8708 COMPOUND_EXPR in the chain will contain the tree for the simplified
8709 form of the builtin function call. */
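/* Illustrative sketch (not part of GCC): given a single-character
   constant needle, the transformation below rewrites

     strstr (s, "/")

   into

     strchr (s, '/')

   and when both arguments are string constants the whole call folds to
   a constant offset into S1 (or a null pointer).  */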
8712 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
8714 if (!validate_arg (s1
, POINTER_TYPE
)
8715 || !validate_arg (s2
, POINTER_TYPE
))
8720 const char *p1
, *p2
;
8729 const char *r
= strstr (p1
, p2
);
8733 return build_int_cst (TREE_TYPE (s1
), 0);
8735 /* Return an offset into the constant string argument. */
8736 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
8737 return fold_convert_loc (loc
, type
, tem
);
8740 /* The argument is const char *, and the result is char *, so we need
8741 a type conversion here to avoid a warning. */
8743 return fold_convert_loc (loc
, type
, s1
);
8748 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
8752 /* New argument list transforming strstr(s1, s2) to
8753 strchr(s1, s2[0]). */
8754 return build_call_expr_loc (loc
, fn
, 2, s1
,
8755 build_int_cst (integer_type_node
, p2
[0]));
8759 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8760 to the call, and TYPE is its return type.
8762 Return NULL_TREE if no simplification was possible, otherwise return the
8763 simplified form of the call as a tree.
8765 The simplified form may be a constant or other expression which
8766 computes the same value, but in a more efficient manner (including
8767 calls to other builtin functions).
8769 The call may contain arguments which need to be evaluated, but
8770 which are not useful to determine the result of the call. In
8771 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8772 COMPOUND_EXPR will be an argument which must be evaluated.
8773 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8774 COMPOUND_EXPR in the chain will contain the tree for the simplified
8775 form of the builtin function call. */
8778 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
8780 if (!validate_arg (s1
, POINTER_TYPE
)
8781 || !validate_arg (s2
, POINTER_TYPE
))
8786 const char *p1
, *p2
;
8795 const char *r
= strpbrk (p1
, p2
);
8799 return build_int_cst (TREE_TYPE (s1
), 0);
8801 /* Return an offset into the constant string argument. */
8802 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
8803 return fold_convert_loc (loc
, type
, tem
);
8807 /* strpbrk(x, "") == NULL.
8808 Evaluate and ignore s1 in case it had side-effects. */
8809 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
8812 return NULL_TREE
; /* Really call strpbrk. */
8814 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
8818 /* New argument list transforming strpbrk(s1, s2) to
8819 strchr(s1, s2[0]). */
8820 return build_call_expr_loc (loc
, fn
, 2, s1
,
8821 build_int_cst (integer_type_node
, p2
[0]));
8825 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8828 Return NULL_TREE if no simplification was possible, otherwise return the
8829 simplified form of the call as a tree.
8831 The simplified form may be a constant or other expression which
8832 computes the same value, but in a more efficient manner (including
8833 calls to other builtin functions).
8835 The call may contain arguments which need to be evaluated, but
8836 which are not useful to determine the result of the call. In
8837 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8838 COMPOUND_EXPR will be an argument which must be evaluated.
8839 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8840 COMPOUND_EXPR in the chain will contain the tree for the simplified
8841 form of the builtin function call. */
8844 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
8846 if (!validate_arg (s1
, POINTER_TYPE
)
8847 || !validate_arg (s2
, POINTER_TYPE
))
8851 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
  /* If either argument is "", the result is 0.  */
8854 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
8855 /* Evaluate and ignore both arguments in case either one has
8857 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
8863 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8866 Return NULL_TREE if no simplification was possible, otherwise return the
8867 simplified form of the call as a tree.
8869 The simplified form may be a constant or other expression which
8870 computes the same value, but in a more efficient manner (including
8871 calls to other builtin functions).
8873 The call may contain arguments which need to be evaluated, but
8874 which are not useful to determine the result of the call. In
8875 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8876 COMPOUND_EXPR will be an argument which must be evaluated.
8877 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8878 COMPOUND_EXPR in the chain will contain the tree for the simplified
8879 form of the builtin function call. */
8882 fold_builtin_strcspn (location_t loc
, tree s1
, tree s2
)
8884 if (!validate_arg (s1
, POINTER_TYPE
)
8885 || !validate_arg (s2
, POINTER_TYPE
))
  /* If the first argument is "", the result is 0.  */
8890 const char *p1
= c_getstr (s1
);
8891 if (p1
&& *p1
== '\0')
8893 /* Evaluate and ignore argument s2 in case it has
8895 return omit_one_operand_loc (loc
, size_type_node
,
8896 size_zero_node
, s2
);
8899 /* If the second argument is "", return __builtin_strlen(s1). */
8900 const char *p2
= c_getstr (s2
);
8901 if (p2
&& *p2
== '\0')
8903 tree fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
8905 /* If the replacement _DECL isn't initialized, don't do the
8910 return build_call_expr_loc (loc
, fn
, 1, s1
);
8916 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
8917 produced. False otherwise. This is done so that we don't output the error
8918 or warning twice or three times. */
8921 fold_builtin_next_arg (tree exp
, bool va_start_p
)
8923 tree fntype
= TREE_TYPE (current_function_decl
);
8924 int nargs
= call_expr_nargs (exp
);
  /* There is a good chance the current input_location points inside the
8927 definition of the va_start macro (perhaps on the token for
8928 builtin) in a system header, so warnings will not be emitted.
8929 Use the location in real source code. */
8930 source_location current_location
=
8931 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
8934 if (!stdarg_p (fntype
))
8936 error ("%<va_start%> used in function with fixed args");
8942 if (va_start_p
&& (nargs
!= 2))
8944 error ("wrong number of arguments to function %<va_start%>");
8947 arg
= CALL_EXPR_ARG (exp
, 1);
8949 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
8950 when we checked the arguments and if needed issued a warning. */
8955 /* Evidently an out of date version of <stdarg.h>; can't validate
8956 va_start's second argument, but can still work as intended. */
8957 warning_at (current_location
,
8959 "%<__builtin_next_arg%> called without an argument");
8964 error ("wrong number of arguments to function %<__builtin_next_arg%>");
8967 arg
= CALL_EXPR_ARG (exp
, 0);
8970 if (TREE_CODE (arg
) == SSA_NAME
)
8971 arg
= SSA_NAME_VAR (arg
);
8973 /* We destructively modify the call to be __builtin_va_start (ap, 0)
8974 or __builtin_next_arg (0) the first time we see it, after checking
8975 the arguments and if needed issuing a warning. */
8976 if (!integer_zerop (arg
))
8978 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
8980 /* Strip off all nops for the sake of the comparison. This
8981 is not quite the same as STRIP_NOPS. It does more.
8982 We must also strip off INDIRECT_EXPR for C++ reference
8984 while (CONVERT_EXPR_P (arg
)
8985 || TREE_CODE (arg
) == INDIRECT_REF
)
8986 arg
= TREE_OPERAND (arg
, 0);
8987 if (arg
!= last_parm
)
	  /* FIXME: Sometimes with the tree optimizers we can end up with
	     something other than the last argument even though the user
	     used the last argument.  We just warn and set the arg to be
	     the last argument so that we will get wrong code because of
	     it.  */
8994 warning_at (current_location
,
8996 "second parameter of %<va_start%> not last named argument");
8999 /* Undefined by C99 7.15.1.4p4 (va_start):
9000 "If the parameter parmN is declared with the register storage
9001 class, with a function or array type, or with a type that is
9002 not compatible with the type that results after application of
9003 the default argument promotions, the behavior is undefined."
9005 else if (DECL_REGISTER (arg
))
9007 warning_at (current_location
,
9009 "undefined behavior when second parameter of "
9010 "%<va_start%> is declared with %<register%> storage");
9013 /* We want to verify the second parameter just once before the tree
9014 optimizers are run and then avoid keeping it in the tree,
9015 as otherwise we could warn even for correct code like:
9016 void foo (int i, ...)
9017 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9019 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
9021 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
9027 /* Expand a call EXP to __builtin_object_size. */
9030 expand_builtin_object_size (tree exp
)
9033 int object_size_type
;
9034 tree fndecl
= get_callee_fndecl (exp
);
9036 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9038 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9040 expand_builtin_trap ();
9044 ost
= CALL_EXPR_ARG (exp
, 1);
9047 if (TREE_CODE (ost
) != INTEGER_CST
9048 || tree_int_cst_sgn (ost
) < 0
9049 || compare_tree_int (ost
, 3) > 0)
9051 error ("%Klast argument of %D is not integer constant between 0 and 3",
9053 expand_builtin_trap ();
9057 object_size_type
= tree_to_shwi (ost
);
9059 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
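/* Illustrative sketch (not part of GCC): when the object size cannot be
   determined, e.g. for

     size_t f (char *p) { return __builtin_object_size (p, 0); }

   the expansion above falls back to the documented "unknown" results:
   (size_t) -1 for size types 0 and 1 and (size_t) 0 for types 2 and 3.  */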
9062 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9063 FCODE is the BUILT_IN_* to use.
9064 Return NULL_RTX if we failed; the caller should emit a normal call,
9065 otherwise try to get the result in TARGET, if convenient (and in
9066 mode MODE if that's convenient). */
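/* Illustrative sketch (not part of GCC): when the object size argument is
   unknown, e.g.

     __builtin___memcpy_chk (d, s, n, (size_t) -1);

   the expansion below simply emits the corresponding unchecked memcpy,
   whereas a constant length exceeding a known constant size triggers the
   "will always overflow destination buffer" warning first.  */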
9069 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
9070 enum built_in_function fcode
)
9072 tree dest
, src
, len
, size
;
9074 if (!validate_arglist (exp
,
9076 fcode
== BUILT_IN_MEMSET_CHK
9077 ? INTEGER_TYPE
: POINTER_TYPE
,
9078 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9081 dest
= CALL_EXPR_ARG (exp
, 0);
9082 src
= CALL_EXPR_ARG (exp
, 1);
9083 len
= CALL_EXPR_ARG (exp
, 2);
9084 size
= CALL_EXPR_ARG (exp
, 3);
9086 if (! tree_fits_uhwi_p (size
))
9089 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
9093 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
9095 warning_at (tree_nonartificial_location (exp
),
9096 0, "%Kcall to %D will always overflow destination buffer",
9097 exp
, get_callee_fndecl (exp
));
9102 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9103 mem{cpy,pcpy,move,set} is available. */
9106 case BUILT_IN_MEMCPY_CHK
:
9107 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
9109 case BUILT_IN_MEMPCPY_CHK
:
9110 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
9112 case BUILT_IN_MEMMOVE_CHK
:
9113 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
9115 case BUILT_IN_MEMSET_CHK
:
9116 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
9125 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
9126 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
9127 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
9128 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
9130 else if (fcode
== BUILT_IN_MEMSET_CHK
)
9134 unsigned int dest_align
= get_pointer_alignment (dest
);
9136 /* If DEST is not a pointer type, call the normal function. */
9137 if (dest_align
== 0)
9140 /* If SRC and DEST are the same (and not volatile), do nothing. */
9141 if (operand_equal_p (src
, dest
, 0))
9145 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
9147 /* Evaluate and ignore LEN in case it has side-effects. */
9148 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9149 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
9152 expr
= fold_build_pointer_plus (dest
, len
);
9153 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
9156 /* __memmove_chk special case. */
9157 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
9159 unsigned int src_align
= get_pointer_alignment (src
);
9164 /* If src is categorized for a readonly section we can use
9165 normal __memcpy_chk. */
9166 if (readonly_data_expr (src
))
9168 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
9171 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
9172 dest
, src
, len
, size
);
9173 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
9174 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
9175 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
9182 /* Emit warning if a buffer overflow is detected at compile time. */
9185 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
9189 location_t loc
= tree_nonartificial_location (exp
);
9193 case BUILT_IN_STRCPY_CHK
:
9194 case BUILT_IN_STPCPY_CHK
:
9195 /* For __strcat_chk the warning will be emitted only if overflowing
9196 by at least strlen (dest) + 1 bytes. */
9197 case BUILT_IN_STRCAT_CHK
:
9198 len
= CALL_EXPR_ARG (exp
, 1);
9199 size
= CALL_EXPR_ARG (exp
, 2);
9202 case BUILT_IN_STRNCAT_CHK
:
9203 case BUILT_IN_STRNCPY_CHK
:
9204 case BUILT_IN_STPNCPY_CHK
:
9205 len
= CALL_EXPR_ARG (exp
, 2);
9206 size
= CALL_EXPR_ARG (exp
, 3);
9208 case BUILT_IN_SNPRINTF_CHK
:
9209 case BUILT_IN_VSNPRINTF_CHK
:
9210 len
= CALL_EXPR_ARG (exp
, 1);
9211 size
= CALL_EXPR_ARG (exp
, 3);
9220 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
9225 len
= c_strlen (len
, 1);
9226 if (! len
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
9229 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
9231 tree src
= CALL_EXPR_ARG (exp
, 1);
9232 if (! src
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
9234 src
= c_strlen (src
, 1);
9235 if (! src
|| ! tree_fits_uhwi_p (src
))
9237 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
9238 exp
, get_callee_fndecl (exp
));
9241 else if (tree_int_cst_lt (src
, size
))
9244 else if (! tree_fits_uhwi_p (len
) || ! tree_int_cst_lt (size
, len
))
9247 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
9248 exp
, get_callee_fndecl (exp
));
9251 /* Emit warning if a buffer overflow is detected at compile time
9252 in __sprintf_chk/__vsprintf_chk calls. */
9255 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
9257 tree size
, len
, fmt
;
9258 const char *fmt_str
;
9259 int nargs
= call_expr_nargs (exp
);
9261 /* Verify the required arguments in the original call. */
9265 size
= CALL_EXPR_ARG (exp
, 2);
9266 fmt
= CALL_EXPR_ARG (exp
, 3);
9268 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
9271 /* Check whether the format is a literal string constant. */
9272 fmt_str
= c_getstr (fmt
);
9273 if (fmt_str
== NULL
)
9276 if (!init_target_chars ())
9279 /* If the format doesn't contain % args or %%, we know its size. */
9280 if (strchr (fmt_str
, target_percent
) == 0)
9281 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
9282 /* If the format is "%s" and first ... argument is a string literal,
9284 else if (fcode
== BUILT_IN_SPRINTF_CHK
9285 && strcmp (fmt_str
, target_percent_s
) == 0)
9291 arg
= CALL_EXPR_ARG (exp
, 4);
9292 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
9295 len
= c_strlen (arg
, 1);
9296 if (!len
|| ! tree_fits_uhwi_p (len
))
9302 if (! tree_int_cst_lt (len
, size
))
9303 warning_at (tree_nonartificial_location (exp
),
9304 0, "%Kcall to %D will always overflow destination buffer",
9305 exp
, get_callee_fndecl (exp
));
9308 /* Emit warning if a free is called with address of a variable. */
9311 maybe_emit_free_warning (tree exp
)
9313 tree arg
= CALL_EXPR_ARG (exp
, 0);
9316 if (TREE_CODE (arg
) != ADDR_EXPR
)
9319 arg
= get_base_address (TREE_OPERAND (arg
, 0));
9320 if (arg
== NULL
|| INDIRECT_REF_P (arg
) || TREE_CODE (arg
) == MEM_REF
)
9323 if (SSA_VAR_P (arg
))
9324 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
9325 "%Kattempt to free a non-heap object %qD", exp
, arg
);
9327 warning_at (tree_nonartificial_location (exp
), OPT_Wfree_nonheap_object
,
9328 "%Kattempt to free a non-heap object", exp
);
9331 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9335 fold_builtin_object_size (tree ptr
, tree ost
)
9337 unsigned HOST_WIDE_INT bytes
;
9338 int object_size_type
;
9340 if (!validate_arg (ptr
, POINTER_TYPE
)
9341 || !validate_arg (ost
, INTEGER_TYPE
))
9346 if (TREE_CODE (ost
) != INTEGER_CST
9347 || tree_int_cst_sgn (ost
) < 0
9348 || compare_tree_int (ost
, 3) > 0)
9351 object_size_type
= tree_to_shwi (ost
);
9353 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9354 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9355 and (size_t) 0 for types 2 and 3. */
9356 if (TREE_SIDE_EFFECTS (ptr
))
9357 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
9359 if (TREE_CODE (ptr
) == ADDR_EXPR
)
9361 compute_builtin_object_size (ptr
, object_size_type
, &bytes
);
9362 if (wi::fits_to_tree_p (bytes
, size_type_node
))
9363 return build_int_cstu (size_type_node
, bytes
);
9365 else if (TREE_CODE (ptr
) == SSA_NAME
)
9367 /* If object size is not known yet, delay folding until
9368 later. Maybe subsequent passes will help determining
9370 if (compute_builtin_object_size (ptr
, object_size_type
, &bytes
)
9371 && wi::fits_to_tree_p (bytes
, size_type_node
))
9372 return build_int_cstu (size_type_node
, bytes
);
9378 /* Builtins with folding operations that operate on "..." arguments
9379 need special handling; we need to store the arguments in a convenient
9380 data structure before attempting any folding. Fortunately there are
9381 only a few builtins that fall into this category. FNDECL is the
9382 function, EXP is the CALL_EXPR for the call. */
9385 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
9387 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9388 tree ret
= NULL_TREE
;
9392 case BUILT_IN_FPCLASSIFY
:
9393 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
9401 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
9402 SET_EXPR_LOCATION (ret
, loc
);
9403 TREE_NO_WARNING (ret
) = 1;
9409 /* Initialize format string characters in the target charset. */
9412 init_target_chars (void)
9417 target_newline
= lang_hooks
.to_target_charset ('\n');
9418 target_percent
= lang_hooks
.to_target_charset ('%');
9419 target_c
= lang_hooks
.to_target_charset ('c');
9420 target_s
= lang_hooks
.to_target_charset ('s');
9421 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
9425 target_percent_c
[0] = target_percent
;
9426 target_percent_c
[1] = target_c
;
9427 target_percent_c
[2] = '\0';
9429 target_percent_s
[0] = target_percent
;
9430 target_percent_s
[1] = target_s
;
9431 target_percent_s
[2] = '\0';
9433 target_percent_s_newline
[0] = target_percent
;
9434 target_percent_s_newline
[1] = target_s
;
9435 target_percent_s_newline
[2] = target_newline
;
9436 target_percent_s_newline
[3] = '\0';
9443 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9444 and no overflow/underflow occurred. INEXACT is true if M was not
9445 exactly calculated. TYPE is the tree type for the result. This
9446 function assumes that you cleared the MPFR flags and then
9447 calculated M to see if anything subsequently set a flag prior to
9448 entering this function. Return NULL_TREE if any checks fail. */
9451 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
9453 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9454 overflow/underflow occurred. If -frounding-math, proceed iff the
9455 result of calling FUNC was exact. */
9456 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9457 && (!flag_rounding_math
|| !inexact
))
9461 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
9462 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9463 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the conversion.  */
9466 if (real_isfinite (&rr
)
9467 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
9469 REAL_VALUE_TYPE rmode
;
9471 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
9472 /* Proceed iff the specified mode can hold the value. */
9473 if (real_identical (&rmode
, &rr
))
9474 return build_real (type
, rmode
);
9480 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9481 number and no overflow/underflow occurred. INEXACT is true if M
9482 was not exactly calculated. TYPE is the tree type for the result.
9483 This function assumes that you cleared the MPFR flags and then
9484 calculated M to see if anything subsequently set a flag prior to
9485 entering this function. Return NULL_TREE if any checks fail, if
9486 FORCE_CONVERT is true, then bypass the checks. */
9489 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
9491 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9492 overflow/underflow occurred. If -frounding-math, proceed iff the
9493 result of calling FUNC was exact. */
9495 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
9496 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9497 && (!flag_rounding_math
|| !inexact
)))
9499 REAL_VALUE_TYPE re
, im
;
9501 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
9502 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
9503 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9504 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the conversion.  */
9508 || (real_isfinite (&re
) && real_isfinite (&im
)
9509 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
9510 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
9512 REAL_VALUE_TYPE re_mode
, im_mode
;
9514 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
9515 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
9516 /* Proceed iff the specified mode can hold the value. */
9518 || (real_identical (&re_mode
, &re
)
9519 && real_identical (&im_mode
, &im
)))
9520 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
9521 build_real (TREE_TYPE (type
), im_mode
));
9527 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9528 the pointer *(ARG_QUO) and return the result. The type is taken
9529 from the type of ARG0 and is used for setting the precision of the
9530 calculation and results. */
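/* Illustrative sketch (not part of GCC): with REAL_CST arguments, e.g.

     int q;
     double r = __builtin_remquo (x_cst, y_cst, &q);

   the code below computes the remainder with mpfr_remquo and returns a
   COMPOUND_EXPR that first assigns the integral quotient bits through
   the pointer argument and then yields the remainder constant.  */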
9533 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
9535 tree
const type
= TREE_TYPE (arg0
);
9536 tree result
= NULL_TREE
;
9541 /* To proceed, MPFR must exactly represent the target floating point
9542 format, which only happens when the target base equals two. */
9543 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
9544 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9545 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
9547 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
9548 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
9550 if (real_isfinite (ra0
) && real_isfinite (ra1
))
9552 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
9553 const int prec
= fmt
->p
;
9554 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
9559 mpfr_inits2 (prec
, m0
, m1
, NULL
);
9560 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
9561 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
9562 mpfr_clear_flags ();
9563 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
9564 /* Remquo is independent of the rounding mode, so pass
9565 inexact=0 to do_mpfr_ckconv(). */
9566 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
9567 mpfr_clears (m0
, m1
, NULL
);
9570 /* MPFR calculates quo in the host's long so it may
9571 return more bits in quo than the target int can hold
9572 if sizeof(host long) > sizeof(target int). This can
9573 happen even for native compilers in LP64 mode. In
9574 these cases, modulo the quo value with the largest
9575 number that the target int can hold while leaving one
9576 bit for the sign. */
9577 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
9578 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
9580 /* Dereference the quo pointer argument. */
9581 arg_quo
= build_fold_indirect_ref (arg_quo
);
9582 /* Proceed iff a valid pointer type was passed in. */
9583 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
9585 /* Set the value. */
9587 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
9588 build_int_cst (TREE_TYPE (arg_quo
),
9590 TREE_SIDE_EFFECTS (result_quo
) = 1;
9591 /* Combine the quo assignment with the rem. */
9592 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
9593 result_quo
, result_rem
));
9601 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9602 resulting value as a tree with type TYPE. The mpfr precision is
9603 set to the precision of TYPE. We assume that this mpfr function
9604 returns zero if the result could be calculated exactly within the
9605 requested precision. In addition, the integer pointer represented
9606 by ARG_SG will be dereferenced and set to the appropriate signgam
9610 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
9612 tree result
= NULL_TREE
;
9616 /* To proceed, MPFR must exactly represent the target floating point
9617 format, which only happens when the target base equals two. Also
9618 verify ARG is a constant and that ARG_SG is an int pointer. */
9619 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
9620 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
9621 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
9622 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
9624 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
9626 /* In addition to NaN and Inf, the argument cannot be zero or a
9627 negative integer. */
9628 if (real_isfinite (ra
)
9629 && ra
->cl
!= rvc_zero
9630 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
9632 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
9633 const int prec
= fmt
->p
;
9634 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
9639 mpfr_init2 (m
, prec
);
9640 mpfr_from_real (m
, ra
, GMP_RNDN
);
9641 mpfr_clear_flags ();
9642 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
9643 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
9649 /* Dereference the arg_sg pointer argument. */
9650 arg_sg
= build_fold_indirect_ref (arg_sg
);
9651 /* Assign the signgam value into *arg_sg. */
9652 result_sg
= fold_build2 (MODIFY_EXPR
,
9653 TREE_TYPE (arg_sg
), arg_sg
,
9654 build_int_cst (TREE_TYPE (arg_sg
), sg
));
9655 TREE_SIDE_EFFECTS (result_sg
) = 1;
9656 /* Combine the signgam assignment with the lgamma result. */
9657 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
9658 result_sg
, result_lg
));
9666 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9667 mpc function FUNC on it and return the resulting value as a tree
9668 with type TYPE. The mpfr precision is set to the precision of
9669 TYPE. We assume that function FUNC returns zero if the result
9670 could be calculated exactly within the requested precision. If
9671 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9672 in the arguments and/or results. */
9675 do_mpc_arg2 (tree arg0
, tree arg1
, tree type
, int do_nonfinite
,
9676 int (*func
)(mpc_ptr
, mpc_srcptr
, mpc_srcptr
, mpc_rnd_t
))
9678 tree result
= NULL_TREE
;
9683 /* To proceed, MPFR must exactly represent the target floating point
9684 format, which only happens when the target base equals two. */
9685 if (TREE_CODE (arg0
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg0
)
9686 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
9687 && TREE_CODE (arg1
) == COMPLEX_CST
&& !TREE_OVERFLOW (arg1
)
9688 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1
))) == REAL_TYPE
9689 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0
))))->b
== 2)
9691 const REAL_VALUE_TYPE
*const re0
= TREE_REAL_CST_PTR (TREE_REALPART (arg0
));
9692 const REAL_VALUE_TYPE
*const im0
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg0
));
9693 const REAL_VALUE_TYPE
*const re1
= TREE_REAL_CST_PTR (TREE_REALPART (arg1
));
9694 const REAL_VALUE_TYPE
*const im1
= TREE_REAL_CST_PTR (TREE_IMAGPART (arg1
));
9697 || (real_isfinite (re0
) && real_isfinite (im0
)
9698 && real_isfinite (re1
) && real_isfinite (im1
)))
9700 const struct real_format
*const fmt
=
9701 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type
)));
9702 const int prec
= fmt
->p
;
9703 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
9704 const mpc_rnd_t crnd
= fmt
->round_towards_zero
? MPC_RNDZZ
: MPC_RNDNN
;
9708 mpc_init2 (m0
, prec
);
9709 mpc_init2 (m1
, prec
);
9710 mpfr_from_real (mpc_realref (m0
), re0
, rnd
);
9711 mpfr_from_real (mpc_imagref (m0
), im0
, rnd
);
9712 mpfr_from_real (mpc_realref (m1
), re1
, rnd
);
9713 mpfr_from_real (mpc_imagref (m1
), im1
, rnd
);
9714 mpfr_clear_flags ();
9715 inexact
= func (m0
, m0
, m1
, crnd
);
9716 result
= do_mpc_ckconv (m0
, type
, inexact
, do_nonfinite
);
9725 /* A wrapper function for builtin folding that prevents warnings for
9726 "statement without effect" and the like, caused by removing the
9727 call node earlier than the warning is generated. */
9730 fold_call_stmt (gcall
*stmt
, bool ignore
)
9732 tree ret
= NULL_TREE
;
9733 tree fndecl
= gimple_call_fndecl (stmt
);
9734 location_t loc
= gimple_location (stmt
);
9736 && TREE_CODE (fndecl
) == FUNCTION_DECL
9737 && DECL_BUILT_IN (fndecl
)
9738 && !gimple_call_va_arg_pack_p (stmt
))
9740 int nargs
= gimple_call_num_args (stmt
);
9741 tree
*args
= (nargs
> 0
9742 ? gimple_call_arg_ptr (stmt
, 0)
9743 : &error_mark_node
);
9745 if (avoid_folding_inline_builtin (fndecl
))
9747 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
9749 return targetm
.fold_builtin (fndecl
, nargs
, args
, ignore
);
9753 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
9756 /* Propagate location information from original call to
9757 expansion of builtin. Otherwise things like
9758 maybe_emit_chk_warning, that operate on the expansion
9759 of a builtin, will use the wrong location information. */
9760 if (gimple_has_location (stmt
))
9763 if (TREE_CODE (ret
) == NOP_EXPR
)
9764 realret
= TREE_OPERAND (ret
, 0);
9765 if (CAN_HAVE_LOCATION_P (realret
)
9766 && !EXPR_HAS_LOCATION (realret
))
9767 SET_EXPR_LOCATION (realret
, loc
);
9777 /* Look up the function in builtin_decl that corresponds to DECL
9778 and set ASMSPEC as its user assembler name. DECL must be a
9779 function decl that declares a builtin. */
9782 set_builtin_user_assembler_name (tree decl
, const char *asmspec
)
9784 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
9785 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
9788 tree builtin
= builtin_decl_explicit (DECL_FUNCTION_CODE (decl
));
9789 set_user_assembler_name (builtin
, asmspec
);
9791 if (DECL_FUNCTION_CODE (decl
) == BUILT_IN_FFS
9792 && INT_TYPE_SIZE
< BITS_PER_WORD
)
9794 set_user_assembler_libfunc ("ffs", asmspec
);
9795 set_optab_libfunc (ffs_optab
, mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0),
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
9803 is_simple_builtin (tree decl
)
9805 if (decl
&& DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
9806 switch (DECL_FUNCTION_CODE (decl
))
9808 /* Builtins that expand to constants. */
9809 case BUILT_IN_CONSTANT_P
:
9810 case BUILT_IN_EXPECT
:
9811 case BUILT_IN_OBJECT_SIZE
:
9812 case BUILT_IN_UNREACHABLE
:
9813 /* Simple register moves or loads from stack. */
9814 case BUILT_IN_ASSUME_ALIGNED
:
9815 case BUILT_IN_RETURN_ADDRESS
:
9816 case BUILT_IN_EXTRACT_RETURN_ADDR
:
9817 case BUILT_IN_FROB_RETURN_ADDR
:
9818 case BUILT_IN_RETURN
:
9819 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
9820 case BUILT_IN_FRAME_ADDRESS
:
9821 case BUILT_IN_VA_END
:
9822 case BUILT_IN_STACK_SAVE
:
9823 case BUILT_IN_STACK_RESTORE
:
9824 /* Exception state returns or moves registers around. */
9825 case BUILT_IN_EH_FILTER
:
9826 case BUILT_IN_EH_POINTER
:
9827 case BUILT_IN_EH_COPY_VALUES
:
/* Return true if DECL is a builtin that is not expensive, i.e. it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
9841 is_inexpensive_builtin (tree decl
)
9845 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_MD
)
9847 else if (DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
)
9848 switch (DECL_FUNCTION_CODE (decl
))
9851 case BUILT_IN_ALLOCA
:
9852 case BUILT_IN_ALLOCA_WITH_ALIGN
:
9853 case BUILT_IN_BSWAP16
:
9854 case BUILT_IN_BSWAP32
:
9855 case BUILT_IN_BSWAP64
:
9857 case BUILT_IN_CLZIMAX
:
9859 case BUILT_IN_CLZLL
:
9861 case BUILT_IN_CTZIMAX
:
9863 case BUILT_IN_CTZLL
:
9865 case BUILT_IN_FFSIMAX
:
9867 case BUILT_IN_FFSLL
:
9868 case BUILT_IN_IMAXABS
:
9869 case BUILT_IN_FINITE
:
9870 case BUILT_IN_FINITEF
:
9871 case BUILT_IN_FINITEL
:
9872 case BUILT_IN_FINITED32
:
9873 case BUILT_IN_FINITED64
:
9874 case BUILT_IN_FINITED128
:
9875 case BUILT_IN_FPCLASSIFY
:
9876 case BUILT_IN_ISFINITE
:
9877 case BUILT_IN_ISINF_SIGN
:
9878 case BUILT_IN_ISINF
:
9879 case BUILT_IN_ISINFF
:
9880 case BUILT_IN_ISINFL
:
9881 case BUILT_IN_ISINFD32
:
9882 case BUILT_IN_ISINFD64
:
9883 case BUILT_IN_ISINFD128
:
9884 case BUILT_IN_ISNAN
:
9885 case BUILT_IN_ISNANF
:
9886 case BUILT_IN_ISNANL
:
9887 case BUILT_IN_ISNAND32
:
9888 case BUILT_IN_ISNAND64
:
9889 case BUILT_IN_ISNAND128
:
9890 case BUILT_IN_ISNORMAL
:
9891 case BUILT_IN_ISGREATER
:
9892 case BUILT_IN_ISGREATEREQUAL
:
9893 case BUILT_IN_ISLESS
:
9894 case BUILT_IN_ISLESSEQUAL
:
9895 case BUILT_IN_ISLESSGREATER
:
9896 case BUILT_IN_ISUNORDERED
:
9897 case BUILT_IN_VA_ARG_PACK
:
9898 case BUILT_IN_VA_ARG_PACK_LEN
:
9899 case BUILT_IN_VA_COPY
:
9901 case BUILT_IN_SAVEREGS
:
9902 case BUILT_IN_POPCOUNTL
:
9903 case BUILT_IN_POPCOUNTLL
:
9904 case BUILT_IN_POPCOUNTIMAX
:
9905 case BUILT_IN_POPCOUNT
:
9906 case BUILT_IN_PARITYL
:
9907 case BUILT_IN_PARITYLL
:
9908 case BUILT_IN_PARITYIMAX
:
9909 case BUILT_IN_PARITY
:
9911 case BUILT_IN_LLABS
:
9912 case BUILT_IN_PREFETCH
:
9913 case BUILT_IN_ACC_ON_DEVICE
:
9917 return is_simple_builtin (decl
);
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

static bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}