/* Expand builtin functions.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "coretypes.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "tree-object-size.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "tree-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
struct target_builtins default_target_builtins;
struct target_builtins *this_target_builtins = &default_target_builtins;

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree *, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_.  */

is_builtin_name (const char *name)
  if (strncmp (name, "__builtin_", 10) == 0)
  if (strncmp (name, "__sync_", 7) == 0)
  if (strncmp (name, "__atomic_", 9) == 0)
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))

/* Return true if DECL is a function symbol representing a built-in.  */

is_builtin_fn (tree decl)
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

called_as_built_in (tree node)
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
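/* Editor's illustrative note (not part of the original source): the check
   above is purely textual on the identifier, so for example

     is_builtin_name ("__builtin_memcpy")       -- true
     is_builtin_name ("__sync_fetch_and_add")   -- true
     is_builtin_name ("memcpy")                 -- false

   and called_as_built_in therefore fires only when the user spells the
   reserved "__builtin_"-style name at the call site.  */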
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
  HOST_WIDE_INT bitsize, bitpos;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
  else if (TREE_CODE (exp) == LABEL_DECL)
  else if (TREE_CODE (exp) == CONST_DECL)
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
      known_alignment = true;
  else if (DECL_P (exp))
      align = DECL_ALIGN (exp);
      known_alignment = true;
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
      tree addr = TREE_OPERAND (exp, 0);
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	      unsigned HOST_WIDE_INT step = 1;
	      step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
  else if (TREE_CODE (exp) == STRING_CST)
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
      known_alignment = true;

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  align = MIN (align, inner);

  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
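/* Editor's illustrative note (not part of the original source): as a concrete
   reading of the M/N contract documented above, a result of *alignp = 64 and
   *bitposp = 16 with a true return means the address of EXP minus 16 bits is
   a multiple of 64 bits, i.e. the object sits exactly 2 bytes past an
   8-byte-aligned address.  */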
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
  return get_object_alignment_2 (exp, alignp, bitposp, false);

/* Return the alignment in bits of EXP, an object.  */

get_object_alignment (tree exp)
  unsigned HOST_WIDE_INT bitpos = 0;
  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */
    align = least_bit_hwi (bitpos);
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      align = MIN (align, inner);
      *bitposp = bitpos & (align - 1);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  *alignp = BITS_PER_UNIT;
  else if (TREE_CODE (exp) == INTEGER_CST)
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
  *alignp = BITS_PER_UNIT;

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

get_pointer_alignment (tree exp)
  unsigned HOST_WIDE_INT bitpos = 0;
  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */
    align = least_bit_hwi (bitpos);
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

c_strlen (tree src, int only_value)
  HOST_WIDE_INT offset;

  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      for (i = 0; i < max; i++)

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */
      return size_diffop_loc (loc, size_int (max), offset_node);

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
  else if (! tree_fits_shwi_p (offset_node))
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
      /* Suppress multiple warnings for propagated constant strings.  */
	  && !TREE_NO_WARNING (src))
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
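/* Editor's illustrative note (not part of the original source): given the
   string constant "foobar" with a known offset of 3, the code above returns
   ssize_int (3), i.e. strlen ("bar"); with a symbolic offset it instead
   returns the array bound minus that offset, and a COND_EXPR such as
   i++ ? "foo" : "bar" folds to 3 only when ONLY_VALUE is nonzero, as the
   function comment explains.  */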
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

c_readstr (const char *str, machine_mode mode)
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;

      ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
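/* Editor's illustrative sketch (not part of the original source): on a
   little-endian target with 8-bit units,

     rtx x = c_readstr ("abcd", SImode);

   produces the constant 0x64636261 (the string bytes in target memory
   order), while a big-endian target yields 0x61626364 for the same call.  */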
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

target_char_cast (tree cst, char *p)
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

builtin_save_expr (tree exp)
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))

  return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;

    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
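/* Editor's illustrative sketch (not part of the original source): a typical
   user-level call that this expansion handles, e.g. in a logging helper:

     void *caller = __builtin_return_address (0);
     void *frame  = __builtin_frame_address (0);

   A count of 0 asks about the current frame; nonzero counts walk the dynamic
   chain as implemented in the loop above.  */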
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.   */
  cfun->has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

expand_builtin_setjmp_receiver (rtx receiver_label)
  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)

      if (i == ARRAY_SIZE (elim_regs))
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
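/* Editor's illustrative sketch (not part of the original source): the setup,
   receiver and longjmp expansions around here implement the internal pair

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       do_work ();          -- normal path; setup stores FP, label and SP
     else
       handle_unwind ();    -- reached via __builtin_longjmp (buf, 1)

   where do_work and handle_unwind are hypothetical.  As the comments below
   caution, this pair is intended for internal exception-handling use, not as
   a general setjmp/longjmp replacement.  */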
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

expand_builtin_longjmp (rtx buf_addr, rtx value)
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));

	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
      gcc_assert (insn != last);
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
      else if (CALL_P (insn))
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
  return (iter->i < iter->n);

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

validate_arglist (const_tree callexpr, ...)
  const_call_expr_arg_iterator iter;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

      code = (enum tree_code) va_arg (ap, int);
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
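/* Editor's illustrative sketch (not part of the original source): a typical
   caller checks an expansion's argument list before committing to it, e.g.

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   i.e. exactly two pointer arguments, with VOID_TYPE terminating the
   specifier list (a trailing 0 would instead allow extra arguments).  */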
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

expand_builtin_nonlocal_goto (tree exp)
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));

      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
      else if (CALL_P (insn))
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

expand_builtin_update_setjmp_buf (rtx buf_addr)
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address (sa_mode,
				   plus_constant (Pmode, buf_addr,
						  2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

expand_builtin_prefetch (tree exp)
  tree arg0, arg1, arg2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     temporal locality).  */
  nargs = call_expr_nargs (exp);
    arg1 = CALL_EXPR_ARG (exp, 1);
    arg1 = integer_zero_node;
    arg2 = CALL_EXPR_ARG (exp, 2);
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");

  if (targetm.have_prefetch ())
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
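/* Editor's illustrative sketch (not part of the original source): source-level
   calls that exercise the argument handling above, e.g. while streaming
   through an array p:

     __builtin_prefetch (&p[i + 16]);           -- rw = 0, locality = 3
     __builtin_prefetch (&p[i + 16], 1, 0);     -- prefetch for write, no reuse

   Non-constant read/write or locality arguments draw the errors emitted
   above and are replaced by zero.  */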
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

get_memory_rtx (tree exp, tree len)
  tree orig_exp = exp;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    set_mem_alias_set (mem, 0);
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

apply_args_size (void)
  static int size = -1;

  /* The values computed by this function never change.  */
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	    apply_args_mode[regno] = VOIDmode;
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

apply_result_size (void)
  static int size = -1;

  /* The values computed by this function never change.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	    apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

result_vector (int savep, rtx result)
  int regno, size, align, nelts;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

expand_builtin_apply_args_1 (void)
  int size, align, regno;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   available.  */

expand_builtin_apply_args (void)
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the
     call to the first insn of this function.  */

    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
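/* Editor's illustrative sketch (not part of the original source): these
   expansions back the classic untyped-forwarding idiom

     void *args = __builtin_apply_args ();
     void *ret  = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (ret);

   where target_fn and the 64-byte argument-block size are hypothetical; the
   size must cover the stack arguments of the forwarded call.  */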
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
  int size, align, regno;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     pointer.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));

  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
  else if (targetm.have_call_value ())
      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
/* Perform an untyped return.  */

expand_builtin_return (rtx result)
  int size, align, regno;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));

  /* Restore the return value and note that each value is used.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	call_fusage = get_insns ();
	size += GET_MODE_SIZE (mode);

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
  switch (TREE_CODE (type))
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
/* Expand a call EXP to __builtin_classify_type.  */

expand_builtin_classify_type (tree exp)
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
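/* Editor's illustrative note (not part of the original source): the constant
   produced here is what user code observes, e.g.

     int k1 = __builtin_classify_type (3.0);   -- real_type_class
     int k2 = __builtin_classify_type (3);     -- integer_type_class

   so the value simply reflects the class of the argument expression's
   type as computed by type_to_class above.  */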
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
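/* Editor's illustrative note (not part of the original source): for example,
   CASE_MATHFN (COPYSIGN) expands to

     CASE_CFN_COPYSIGN:
       fcode = BUILT_IN_COPYSIGN; fcodef = BUILT_IN_COPYSIGNF;
       fcodel = BUILT_IN_COPYSIGNL; break;

   so each use supplies the case labels plus the double/float/long-double
   function codes for the switch in mathfn_built_in_2 below.  */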
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
  built_in_function fcode, fcodef, fcodel;

    CASE_MATHFN (COPYSIGN)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (TGAMMA)
      return END_BUILTINS;

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
  return END_BUILTINS;
/* Return the mathematical function equivalent to FN but operating directly on
   TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return NULL_TREE.  */

static tree
mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
{
  built_in_function fcode2 = mathfn_built_in_2 (type, fn);
  if (fcode2 == END_BUILTINS)
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit array.  */

tree
mathfn_built_in (tree type, combined_fn fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}

/* Like mathfn_built_in_1, but take a built_in_function and
   always use the implicit array.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
}
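
/* Illustrative example: mathfn_built_in (float_type_node, BUILT_IN_COPYSIGN)
   maps COPYSIGN onto its float variant and returns the declaration of
   copysignf, provided that builtin is implicitly available; otherwise it
   returns NULL_TREE.  */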
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internal-fn.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
        return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
   on the current target by a call to an internal function, return the
   code of that internal function, otherwise return IFN_LAST.  The caller
   is responsible for ensuring that any side-effects of the built-in
   call are dealt with correctly.  E.g. if CALL sets errno, the caller
   must decide that the errno result isn't needed or make it available
   in some other way.  */

internal_fn
replacement_internal_fn (gcall *call)
{
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    {
      internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
      if (ifn != IFN_LAST)
        {
          tree_pair types = direct_internal_fn_types (ifn, call);
          optimization_type opt_type = bb_optimization_type (gimple_bb (call));
          if (direct_internal_fn_supported_p (ifn, types, opt_type))
            return ifn;
        }
    }
  return IFN_LAST;
}
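
/* Illustrative note: a caller expanding, say, a sqrt call can use this to
   learn that IFN_SQRT is directly supported for the call's types in the
   containing block, and then emit the internal function instead, as long as
   it has first established that the library call's errno side-effect does
   not matter.  */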
/* Expand a call to the builtin ternary math functions (fma).
1973 Return NULL_RTX if a normal call should be emitted rather than expanding the
1974 function in-line. EXP is the expression that is a call to the builtin
1975 function; if convenient, the result should be placed in TARGET.
1976 SUBTARGET may be used as the target for computing one of EXP's
1980 expand_builtin_mathfn_ternary (tree exp
, rtx target
, rtx subtarget
)
1982 optab builtin_optab
;
1983 rtx op0
, op1
, op2
, result
;
1985 tree fndecl
= get_callee_fndecl (exp
);
1986 tree arg0
, arg1
, arg2
;
1989 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
1992 arg0
= CALL_EXPR_ARG (exp
, 0);
1993 arg1
= CALL_EXPR_ARG (exp
, 1);
1994 arg2
= CALL_EXPR_ARG (exp
, 2);
1996 switch (DECL_FUNCTION_CODE (fndecl
))
1998 CASE_FLT_FN (BUILT_IN_FMA
):
1999 builtin_optab
= fma_optab
; break;
2004 /* Make a suitable register to place result in. */
2005 mode
= TYPE_MODE (TREE_TYPE (exp
));
2007 /* Before working hard, check whether the instruction is available. */
2008 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2011 result
= gen_reg_rtx (mode
);
2013 /* Always stabilize the argument list. */
2014 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2015 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2016 CALL_EXPR_ARG (exp
, 2) = arg2
= builtin_save_expr (arg2
);
2018 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2019 op1
= expand_normal (arg1
);
2020 op2
= expand_normal (arg2
);
2024 /* Compute into RESULT.
2025 Set RESULT to wherever the result comes back. */
2026 result
= expand_ternary_op (mode
, builtin_optab
, op0
, op1
, op2
,
  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
2035 return expand_call (exp
, target
, target
== const0_rtx
);
2038 /* Output the entire sequence. */
2039 insns
= get_insns ();
2046 /* Expand a call to the builtin sin and cos math functions.
2047 Return NULL_RTX if a normal call should be emitted rather than expanding the
2048 function in-line. EXP is the expression that is a call to the builtin
2049 function; if convenient, the result should be placed in TARGET.
2050 SUBTARGET may be used as the target for computing one of EXP's
2054 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2056 optab builtin_optab
;
2059 tree fndecl
= get_callee_fndecl (exp
);
2063 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2066 arg
= CALL_EXPR_ARG (exp
, 0);
2068 switch (DECL_FUNCTION_CODE (fndecl
))
2070 CASE_FLT_FN (BUILT_IN_SIN
):
2071 CASE_FLT_FN (BUILT_IN_COS
):
2072 builtin_optab
= sincos_optab
; break;
2077 /* Make a suitable register to place result in. */
2078 mode
= TYPE_MODE (TREE_TYPE (exp
));
  /* Check if the sincos insn is available; otherwise fall back
     to the sin or cos insn.  */
2082 if (optab_handler (builtin_optab
, mode
) == CODE_FOR_nothing
)
2083 switch (DECL_FUNCTION_CODE (fndecl
))
2085 CASE_FLT_FN (BUILT_IN_SIN
):
2086 builtin_optab
= sin_optab
; break;
2087 CASE_FLT_FN (BUILT_IN_COS
):
2088 builtin_optab
= cos_optab
; break;
2093 /* Before working hard, check whether the instruction is available. */
2094 if (optab_handler (builtin_optab
, mode
) != CODE_FOR_nothing
)
2096 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2101 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2103 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2107 /* Compute into RESULT.
2108 Set RESULT to wherever the result comes back. */
2109 if (builtin_optab
== sincos_optab
)
2113 switch (DECL_FUNCTION_CODE (fndecl
))
2115 CASE_FLT_FN (BUILT_IN_SIN
):
2116 ok
= expand_twoval_unop (builtin_optab
, op0
, 0, result
, 0);
2118 CASE_FLT_FN (BUILT_IN_COS
):
2119 ok
= expand_twoval_unop (builtin_optab
, op0
, result
, 0, 0);
2127 result
= expand_unop (mode
, builtin_optab
, op0
, result
, 0);
2131 /* Output the entire sequence. */
2132 insns
= get_insns ();
  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
2144 return expand_call (exp
, target
, target
== const0_rtx
);
/* Given an interclass math builtin decl FNDECL and its argument ARG,
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available, return CODE_FOR_nothing.  */
2151 static enum insn_code
2152 interclass_mathfn_icode (tree arg
, tree fndecl
)
2154 bool errno_set
= false;
2155 optab builtin_optab
= unknown_optab
;
2158 switch (DECL_FUNCTION_CODE (fndecl
))
2160 CASE_FLT_FN (BUILT_IN_ILOGB
):
2161 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2162 CASE_FLT_FN (BUILT_IN_ISINF
):
2163 builtin_optab
= isinf_optab
; break;
2164 case BUILT_IN_ISNORMAL
:
2165 case BUILT_IN_ISFINITE
:
2166 CASE_FLT_FN (BUILT_IN_FINITE
):
2167 case BUILT_IN_FINITED32
:
2168 case BUILT_IN_FINITED64
:
2169 case BUILT_IN_FINITED128
:
2170 case BUILT_IN_ISINFD32
:
2171 case BUILT_IN_ISINFD64
:
2172 case BUILT_IN_ISINFD128
:
2173 /* These builtins have no optabs (yet). */
2179 /* There's no easy way to detect the case we need to set EDOM. */
2180 if (flag_errno_math
&& errno_set
)
2181 return CODE_FOR_nothing
;
2183 /* Optab mode depends on the mode of the input argument. */
2184 mode
= TYPE_MODE (TREE_TYPE (arg
));
2187 return optab_handler (builtin_optab
, mode
);
2188 return CODE_FOR_nothing
;
2191 /* Expand a call to one of the builtin math functions that operate on
2192 floating point argument and output an integer result (ilogb, isinf,
2194 Return 0 if a normal call should be emitted rather than expanding the
2195 function in-line. EXP is the expression that is a call to the builtin
2196 function; if convenient, the result should be placed in TARGET. */
2199 expand_builtin_interclass_mathfn (tree exp
, rtx target
)
2201 enum insn_code icode
= CODE_FOR_nothing
;
2203 tree fndecl
= get_callee_fndecl (exp
);
2207 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2210 arg
= CALL_EXPR_ARG (exp
, 0);
2211 icode
= interclass_mathfn_icode (arg
, fndecl
);
2212 mode
= TYPE_MODE (TREE_TYPE (arg
));
2214 if (icode
!= CODE_FOR_nothing
)
2216 struct expand_operand ops
[1];
2217 rtx_insn
*last
= get_last_insn ();
2218 tree orig_arg
= arg
;
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2223 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2225 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2227 if (mode
!= GET_MODE (op0
))
2228 op0
= convert_to_mode (mode
, op0
, 0);
2230 create_output_operand (&ops
[0], target
, TYPE_MODE (TREE_TYPE (exp
)));
2231 if (maybe_legitimize_operands (icode
, 0, 1, ops
)
2232 && maybe_emit_unop_insn (icode
, ops
[0].value
, op0
, UNKNOWN
))
2233 return ops
[0].value
;
2235 delete_insns_since (last
);
2236 CALL_EXPR_ARG (exp
, 0) = orig_arg
;
2242 /* Expand a call to the builtin sincos math function.
2243 Return NULL_RTX if a normal call should be emitted rather than expanding the
2244 function in-line. EXP is the expression that is a call to the builtin
2248 expand_builtin_sincos (tree exp
)
2250 rtx op0
, op1
, op2
, target1
, target2
;
2252 tree arg
, sinp
, cosp
;
2254 location_t loc
= EXPR_LOCATION (exp
);
2255 tree alias_type
, alias_off
;
2257 if (!validate_arglist (exp
, REAL_TYPE
,
2258 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2261 arg
= CALL_EXPR_ARG (exp
, 0);
2262 sinp
= CALL_EXPR_ARG (exp
, 1);
2263 cosp
= CALL_EXPR_ARG (exp
, 2);
2265 /* Make a suitable register to place result in. */
2266 mode
= TYPE_MODE (TREE_TYPE (arg
));
2268 /* Check if sincos insn is available, otherwise emit the call. */
2269 if (optab_handler (sincos_optab
, mode
) == CODE_FOR_nothing
)
2272 target1
= gen_reg_rtx (mode
);
2273 target2
= gen_reg_rtx (mode
);
2275 op0
= expand_normal (arg
);
2276 alias_type
= build_pointer_type_for_mode (TREE_TYPE (arg
), ptr_mode
, true);
2277 alias_off
= build_int_cst (alias_type
, 0);
2278 op1
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2280 op2
= expand_normal (fold_build2_loc (loc
, MEM_REF
, TREE_TYPE (arg
),
2283 /* Compute into target1 and target2.
2284 Set TARGET to wherever the result comes back. */
2285 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2286 gcc_assert (result
);
2288 /* Move target1 and target2 to the memory locations indicated
2290 emit_move_insn (op1
, target1
);
2291 emit_move_insn (op2
, target2
);
2296 /* Expand a call to the internal cexpi builtin to the sincos math function.
2297 EXP is the expression that is a call to the builtin function; if convenient,
2298 the result should be placed in TARGET. */
2301 expand_builtin_cexpi (tree exp
, rtx target
)
2303 tree fndecl
= get_callee_fndecl (exp
);
2307 location_t loc
= EXPR_LOCATION (exp
);
2309 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2312 arg
= CALL_EXPR_ARG (exp
, 0);
2313 type
= TREE_TYPE (arg
);
2314 mode
= TYPE_MODE (TREE_TYPE (arg
));
  /* Try expanding via a sincos optab; fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos or cexp, or when either of them is
     available.  */
2319 if (optab_handler (sincos_optab
, mode
) != CODE_FOR_nothing
)
2321 op1
= gen_reg_rtx (mode
);
2322 op2
= gen_reg_rtx (mode
);
2324 op0
= expand_expr (arg
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
2326 /* Compute into op1 and op2. */
2327 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2329 else if (targetm
.libc_has_function (function_sincos
))
2331 tree call
, fn
= NULL_TREE
;
2335 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2336 fn
= builtin_decl_explicit (BUILT_IN_SINCOSF
);
2337 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2338 fn
= builtin_decl_explicit (BUILT_IN_SINCOS
);
2339 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2340 fn
= builtin_decl_explicit (BUILT_IN_SINCOSL
);
2344 op1
= assign_temp (TREE_TYPE (arg
), 1, 1);
2345 op2
= assign_temp (TREE_TYPE (arg
), 1, 1);
2346 op1a
= copy_addr_to_reg (XEXP (op1
, 0));
2347 op2a
= copy_addr_to_reg (XEXP (op2
, 0));
2348 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2349 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2351 /* Make sure not to fold the sincos call again. */
2352 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2353 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2354 call
, 3, arg
, top1
, top2
));
2358 tree call
, fn
= NULL_TREE
, narg
;
2359 tree ctype
= build_complex_type (type
);
2361 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2362 fn
= builtin_decl_explicit (BUILT_IN_CEXPF
);
2363 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2364 fn
= builtin_decl_explicit (BUILT_IN_CEXP
);
2365 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2366 fn
= builtin_decl_explicit (BUILT_IN_CEXPL
);
2370 /* If we don't have a decl for cexp create one. This is the
2371 friendliest fallback if the user calls __builtin_cexpi
2372 without full target C99 function support. */
2373 if (fn
== NULL_TREE
)
2376 const char *name
= NULL
;
2378 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2380 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2382 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2385 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2386 fn
= build_fn_decl (name
, fntype
);
2389 narg
= fold_build2_loc (loc
, COMPLEX_EXPR
, ctype
,
2390 build_real (type
, dconst0
), arg
);
2392 /* Make sure not to fold the cexp call again. */
2393 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2394 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2395 target
, VOIDmode
, EXPAND_NORMAL
);
2398 /* Now build the proper return type. */
2399 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2400 make_tree (TREE_TYPE (arg
), op2
),
2401 make_tree (TREE_TYPE (arg
), op1
)),
2402 target
, VOIDmode
, EXPAND_NORMAL
);
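
/* Summary of the strategy above (descriptive note): prefer the sincos optab
   when the target provides it; otherwise, if the C library has sincos, call
   it through a pair of stack temporaries; as a last resort call cexp
   (synthesizing a declaration for it if necessary) on the argument made
   purely imaginary, and extract the complex result.  */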
2405 /* Conveniently construct a function call expression. FNDECL names the
2406 function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2411 build_call_nofold_loc (location_t loc
, tree fndecl
, int n
, ...)
2414 tree fntype
= TREE_TYPE (fndecl
);
2415 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
2418 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
2420 SET_EXPR_LOCATION (fn
, loc
);
2424 /* Expand a call to one of the builtin rounding functions gcc defines
2425 as an extension (lfloor and lceil). As these are gcc extensions we
2426 do not need to worry about setting errno to EDOM.
2427 If expanding via optab fails, lower expression to (int)(floor(x)).
2428 EXP is the expression that is a call to the builtin function;
2429 if convenient, the result should be placed in TARGET. */
2432 expand_builtin_int_roundingfn (tree exp
, rtx target
)
2434 convert_optab builtin_optab
;
2437 tree fndecl
= get_callee_fndecl (exp
);
2438 enum built_in_function fallback_fn
;
2439 tree fallback_fndecl
;
2443 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2446 arg
= CALL_EXPR_ARG (exp
, 0);
2448 switch (DECL_FUNCTION_CODE (fndecl
))
2450 CASE_FLT_FN (BUILT_IN_ICEIL
):
2451 CASE_FLT_FN (BUILT_IN_LCEIL
):
2452 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2453 builtin_optab
= lceil_optab
;
2454 fallback_fn
= BUILT_IN_CEIL
;
2457 CASE_FLT_FN (BUILT_IN_IFLOOR
):
2458 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2459 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2460 builtin_optab
= lfloor_optab
;
2461 fallback_fn
= BUILT_IN_FLOOR
;
2468 /* Make a suitable register to place result in. */
2469 mode
= TYPE_MODE (TREE_TYPE (exp
));
2471 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2476 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2478 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2482 /* Compute into TARGET. */
2483 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2485 /* Output the entire sequence. */
2486 insns
= get_insns ();
2492 /* If we were unable to expand via the builtin, stop the sequence
2493 (without outputting the insns). */
2496 /* Fall back to floating point rounding optab. */
2497 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for targets without full C99 support.  */
2503 if (fallback_fndecl
== NULL_TREE
)
2506 const char *name
= NULL
;
2508 switch (DECL_FUNCTION_CODE (fndecl
))
2510 case BUILT_IN_ICEIL
:
2511 case BUILT_IN_LCEIL
:
2512 case BUILT_IN_LLCEIL
:
2515 case BUILT_IN_ICEILF
:
2516 case BUILT_IN_LCEILF
:
2517 case BUILT_IN_LLCEILF
:
2520 case BUILT_IN_ICEILL
:
2521 case BUILT_IN_LCEILL
:
2522 case BUILT_IN_LLCEILL
:
2525 case BUILT_IN_IFLOOR
:
2526 case BUILT_IN_LFLOOR
:
2527 case BUILT_IN_LLFLOOR
:
2530 case BUILT_IN_IFLOORF
:
2531 case BUILT_IN_LFLOORF
:
2532 case BUILT_IN_LLFLOORF
:
2535 case BUILT_IN_IFLOORL
:
2536 case BUILT_IN_LFLOORL
:
2537 case BUILT_IN_LLFLOORL
:
2544 fntype
= build_function_type_list (TREE_TYPE (arg
),
2545 TREE_TYPE (arg
), NULL_TREE
);
2546 fallback_fndecl
= build_fn_decl (name
, fntype
);
2549 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
), fallback_fndecl
, 1, arg
);
2551 tmp
= expand_normal (exp
);
2552 tmp
= maybe_emit_group_store (tmp
, TREE_TYPE (exp
));
2554 /* Truncate the result of floating point optab to integer
2555 via expand_fix (). */
2556 target
= gen_reg_rtx (mode
);
2557 expand_fix (target
, tmp
, 0);
2562 /* Expand a call to one of the builtin math functions doing integer
2564 Return 0 if a normal call should be emitted rather than expanding the
2565 function in-line. EXP is the expression that is a call to the builtin
2566 function; if convenient, the result should be placed in TARGET. */
2569 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
)
2571 convert_optab builtin_optab
;
2574 tree fndecl
= get_callee_fndecl (exp
);
2577 enum built_in_function fallback_fn
= BUILT_IN_NONE
;
2579 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2582 arg
= CALL_EXPR_ARG (exp
, 0);
2584 switch (DECL_FUNCTION_CODE (fndecl
))
2586 CASE_FLT_FN (BUILT_IN_IRINT
):
2587 fallback_fn
= BUILT_IN_LRINT
;
2589 CASE_FLT_FN (BUILT_IN_LRINT
):
2590 CASE_FLT_FN (BUILT_IN_LLRINT
):
2591 builtin_optab
= lrint_optab
;
2594 CASE_FLT_FN (BUILT_IN_IROUND
):
2595 fallback_fn
= BUILT_IN_LROUND
;
2597 CASE_FLT_FN (BUILT_IN_LROUND
):
2598 CASE_FLT_FN (BUILT_IN_LLROUND
):
2599 builtin_optab
= lround_optab
;
2606 /* There's no easy way to detect the case we need to set EDOM. */
2607 if (flag_errno_math
&& fallback_fn
== BUILT_IN_NONE
)
2610 /* Make a suitable register to place result in. */
2611 mode
= TYPE_MODE (TREE_TYPE (exp
));
2613 /* There's no easy way to detect the case we need to set EDOM. */
2614 if (!flag_errno_math
)
2616 rtx result
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2621 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2623 op0
= expand_expr (arg
, NULL
, VOIDmode
, EXPAND_NORMAL
);
2627 if (expand_sfix_optab (result
, op0
, builtin_optab
))
2629 /* Output the entire sequence. */
2630 insns
= get_insns ();
  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
2642 if (fallback_fn
!= BUILT_IN_NONE
)
2644 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2645 targets, (int) round (x) should never be transformed into
2646 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2647 a call to lround in the hope that the target provides at least some
         C99 functions.  This should result in the best user experience for
         targets without full C99 support.  */
2650 tree fallback_fndecl
= mathfn_built_in_1
2651 (TREE_TYPE (arg
), as_combined_fn (fallback_fn
), 0);
2653 exp
= build_call_nofold_loc (EXPR_LOCATION (exp
),
2654 fallback_fndecl
, 1, arg
);
2656 target
= expand_call (exp
, NULL_RTX
, target
== const0_rtx
);
2657 target
= maybe_emit_group_store (target
, TREE_TYPE (exp
));
2658 return convert_to_mode (mode
, target
, 0);
2661 return expand_call (exp
, target
, target
== const0_rtx
);
2664 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2665 a normal call should be emitted rather than expanding the function
2666 in-line. EXP is the expression that is a call to the builtin
2667 function; if convenient, the result should be placed in TARGET. */
2670 expand_builtin_powi (tree exp
, rtx target
)
2677 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2680 arg0
= CALL_EXPR_ARG (exp
, 0);
2681 arg1
= CALL_EXPR_ARG (exp
, 1);
2682 mode
= TYPE_MODE (TREE_TYPE (exp
));
2684 /* Emit a libcall to libgcc. */
2686 /* Mode of the 2nd argument must match that of an int. */
2687 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2689 if (target
== NULL_RTX
)
2690 target
= gen_reg_rtx (mode
);
2692 op0
= expand_expr (arg0
, NULL_RTX
, mode
, EXPAND_NORMAL
);
2693 if (GET_MODE (op0
) != mode
)
2694 op0
= convert_to_mode (mode
, op0
, 0);
2695 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
2696 if (GET_MODE (op1
) != mode2
)
2697 op1
= convert_to_mode (mode2
, op1
, 0);
2699 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
2700 target
, LCT_CONST
, mode
, 2,
2701 op0
, mode
, op1
, mode2
);
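
/* Descriptive note: as written here, powi is never expanded to an insn
   pattern; the expansion always goes through the optab's libfunc, which on
   typical targets resolves to a libgcc routine (for instance __powidf2 for
   DFmode; the exact name is an illustration, see libgcc).  */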
2706 /* Expand expression EXP which is a call to the strlen builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
2708 try to get the result in TARGET, if convenient. */
2711 expand_builtin_strlen (tree exp
, rtx target
,
2712 machine_mode target_mode
)
2714 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2718 struct expand_operand ops
[4];
2721 tree src
= CALL_EXPR_ARG (exp
, 0);
2723 rtx_insn
*before_strlen
;
2724 machine_mode insn_mode
= target_mode
;
2725 enum insn_code icode
= CODE_FOR_nothing
;
2728 /* If the length can be computed at compile-time, return it. */
2729 len
= c_strlen (src
, 0);
2731 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2733 /* If the length can be computed at compile-time and is constant
2734 integer, but there are side-effects in src, evaluate
2735 src for side-effects, then return len.
2736 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2737 can be optimized into: i++; x = 3; */
2738 len
= c_strlen (src
, 1);
2739 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2741 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2742 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2745 align
= get_pointer_alignment (src
) / BITS_PER_UNIT
;
2747 /* If SRC is not a pointer type, don't do this operation inline. */
2751 /* Bail out if we can't compute strlen in the right mode. */
2752 while (insn_mode
!= VOIDmode
)
2754 icode
= optab_handler (strlen_optab
, insn_mode
);
2755 if (icode
!= CODE_FOR_nothing
)
2758 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
2760 if (insn_mode
== VOIDmode
)
2763 /* Make a place to hold the source address. We will not expand
2764 the actual source until we are sure that the expansion will
2765 not fail -- there are trees that cannot be expanded twice. */
2766 src_reg
= gen_reg_rtx (Pmode
);
2768 /* Mark the beginning of the strlen sequence so we can emit the
2769 source operand later. */
2770 before_strlen
= get_last_insn ();
2772 create_output_operand (&ops
[0], target
, insn_mode
);
2773 create_fixed_operand (&ops
[1], gen_rtx_MEM (BLKmode
, src_reg
));
2774 create_integer_operand (&ops
[2], 0);
2775 create_integer_operand (&ops
[3], align
);
2776 if (!maybe_expand_insn (icode
, 4, ops
))
2779 /* Now that we are assured of success, expand the source. */
2781 pat
= expand_expr (src
, src_reg
, Pmode
, EXPAND_NORMAL
);
2784 #ifdef POINTERS_EXTEND_UNSIGNED
2785 if (GET_MODE (pat
) != Pmode
)
2786 pat
= convert_to_mode (Pmode
, pat
,
2787 POINTERS_EXTEND_UNSIGNED
);
2789 emit_move_insn (src_reg
, pat
);
2795 emit_insn_after (pat
, before_strlen
);
2797 emit_insn_before (pat
, get_insns ());
2799 /* Return the value in the proper mode for this function. */
2800 if (GET_MODE (ops
[0].value
) == target_mode
)
2801 target
= ops
[0].value
;
2802 else if (target
!= 0)
2803 convert_move (target
, ops
[0].value
, 0);
2805 target
= convert_to_mode (target_mode
, ops
[0].value
, 0);
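
/* Descriptive note on the expansion above: a constant argument such as
   strlen ("foo") is folded to its known length via c_strlen; otherwise, if
   the target provides a strlen pattern in some sufficiently wide mode, that
   pattern is used and the result is converted to TARGET_MODE; failing both,
   NULL_RTX is returned and the caller emits an ordinary library call.  */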
2811 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2812 bytes from constant string DATA + OFFSET and return it as target
2816 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
2819 const char *str
= (const char *) data
;
2821 gcc_assert (offset
>= 0
2822 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
2823 <= strlen (str
) + 1));
2825 return c_readstr (str
+ offset
, mode
);
/* LEN specifies the length of the block for a memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess about the maximum size,
   which we then store in PROBABLE_MAX_SIZE.  */
2834 determine_block_size (tree len
, rtx len_rtx
,
2835 unsigned HOST_WIDE_INT
*min_size
,
2836 unsigned HOST_WIDE_INT
*max_size
,
2837 unsigned HOST_WIDE_INT
*probable_max_size
)
2839 if (CONST_INT_P (len_rtx
))
2841 *min_size
= *max_size
= *probable_max_size
= UINTVAL (len_rtx
);
2847 enum value_range_type range_type
= VR_UNDEFINED
;
2849 /* Determine bounds from the type. */
2850 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len
))))
2851 *min_size
= tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len
)));
2854 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len
))))
2855 *probable_max_size
= *max_size
2856 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len
)));
2858 *probable_max_size
= *max_size
= GET_MODE_MASK (GET_MODE (len_rtx
));
2860 if (TREE_CODE (len
) == SSA_NAME
)
2861 range_type
= get_range_info (len
, &min
, &max
);
2862 if (range_type
== VR_RANGE
)
2864 if (wi::fits_uhwi_p (min
) && *min_size
< min
.to_uhwi ())
2865 *min_size
= min
.to_uhwi ();
2866 if (wi::fits_uhwi_p (max
) && *max_size
> max
.to_uhwi ())
2867 *probable_max_size
= *max_size
= max
.to_uhwi ();
2869 else if (range_type
== VR_ANTI_RANGE
)
          /* An anti-range of 0...N lets us determine that the minimal size is N+1.  */
2874 if (wi::fits_uhwi_p (max
) && max
.to_uhwi () + 1 != 0)
2875 *min_size
= max
.to_uhwi () + 1;
             This produces an anti-range allowing negative values of N.  We
             can still use the information and guess that N is not negative.  */
2886 else if (!wi::leu_p (max
, 1 << 30) && wi::fits_uhwi_p (min
))
2887 *probable_max_size
= min
.to_uhwi () - 1;
2890 gcc_checking_assert (*max_size
<=
2891 (unsigned HOST_WIDE_INT
)
2892 GET_MODE_MASK (GET_MODE (len_rtx
)));
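
/* Illustrative example of the above: if LEN is an SSA name whose recorded
   value range is [4, 32], then MIN_SIZE becomes 4 and MAX_SIZE and
   PROBABLE_MAX_SIZE become 32, assuming the bounds of LEN's type do not
   narrow this further.  */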
2895 /* Helper function to do the actual work for expand_builtin_memcpy. */
2898 expand_builtin_memcpy_args (tree dest
, tree src
, tree len
, rtx target
, tree exp
)
2900 const char *src_str
;
2901 unsigned int src_align
= get_pointer_alignment (src
);
2902 unsigned int dest_align
= get_pointer_alignment (dest
);
2903 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
2904 HOST_WIDE_INT expected_size
= -1;
2905 unsigned int expected_align
= 0;
2906 unsigned HOST_WIDE_INT min_size
;
2907 unsigned HOST_WIDE_INT max_size
;
2908 unsigned HOST_WIDE_INT probable_max_size
;
2910 /* If DEST is not a pointer type, call the normal function. */
2911 if (dest_align
== 0)
  /* If SRC is not a pointer type, don't do this operation in-line.  */
2919 if (currently_expanding_gimple_stmt
)
2920 stringop_block_profile (currently_expanding_gimple_stmt
,
2921 &expected_align
, &expected_size
);
2923 if (expected_align
< dest_align
)
2924 expected_align
= dest_align
;
2925 dest_mem
= get_memory_rtx (dest
, len
);
2926 set_mem_align (dest_mem
, dest_align
);
2927 len_rtx
= expand_normal (len
);
2928 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
2929 &probable_max_size
);
2930 src_str
= c_getstr (src
);
  /* If SRC is a string constant and the block move would be done
     by pieces, we can avoid loading the string from memory
     and store only the computed constants.  */
2936 && CONST_INT_P (len_rtx
)
2937 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
2938 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
2939 CONST_CAST (char *, src_str
),
2942 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
2943 builtin_memcpy_read_str
,
2944 CONST_CAST (char *, src_str
),
2945 dest_align
, false, 0);
2946 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
2947 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2951 src_mem
= get_memory_rtx (src
, len
);
2952 set_mem_align (src_mem
, src_align
);
2954 /* Copy word part most expediently. */
2955 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
2956 CALL_EXPR_TAILCALL (exp
)
2957 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
2958 expected_align
, expected_size
,
2959 min_size
, max_size
, probable_max_size
);
2963 dest_addr
= force_operand (XEXP (dest_mem
, 0), target
);
2964 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
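
/* Illustrative example of the paths above: a call like
   memcpy (dst, "abcd", 5), with a constant length and a string-constant
   source, can be expanded entirely by store_by_pieces without ever reading
   the literal at run time, whereas a variable-length copy is handed to
   emit_block_move_hints together with the size hints computed by
   determine_block_size.  */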
2970 /* Expand a call EXP to the memcpy builtin.
2971 Return NULL_RTX if we failed, the caller should emit a normal call,
2972 otherwise try to get the result in TARGET, if convenient (and in
2973 mode MODE if that's convenient). */
2976 expand_builtin_memcpy (tree exp
, rtx target
)
2978 if (!validate_arglist (exp
,
2979 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2983 tree dest
= CALL_EXPR_ARG (exp
, 0);
2984 tree src
= CALL_EXPR_ARG (exp
, 1);
2985 tree len
= CALL_EXPR_ARG (exp
, 2);
2986 return expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
2990 /* Expand an instrumented call EXP to the memcpy builtin.
2991 Return NULL_RTX if we failed, the caller should emit a normal call,
2992 otherwise try to get the result in TARGET, if convenient (and in
2993 mode MODE if that's convenient). */
2996 expand_builtin_memcpy_with_bounds (tree exp
, rtx target
)
2998 if (!validate_arglist (exp
,
2999 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3000 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3001 INTEGER_TYPE
, VOID_TYPE
))
3005 tree dest
= CALL_EXPR_ARG (exp
, 0);
3006 tree src
= CALL_EXPR_ARG (exp
, 2);
3007 tree len
= CALL_EXPR_ARG (exp
, 4);
3008 rtx res
= expand_builtin_memcpy_args (dest
, src
, len
, target
, exp
);
3010 /* Return src bounds with the result. */
3013 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3014 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3015 res
= chkp_join_splitted_slot (res
, bnd
);
3021 /* Expand a call EXP to the mempcpy builtin.
3022 Return NULL_RTX if we failed; the caller should emit a normal call,
3023 otherwise try to get the result in TARGET, if convenient (and in
3024 mode MODE if that's convenient). If ENDP is 0 return the
3025 destination pointer, if ENDP is 1 return the end pointer ala
3026 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3030 expand_builtin_mempcpy (tree exp
, rtx target
, machine_mode mode
)
3032 if (!validate_arglist (exp
,
3033 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3037 tree dest
= CALL_EXPR_ARG (exp
, 0);
3038 tree src
= CALL_EXPR_ARG (exp
, 1);
3039 tree len
= CALL_EXPR_ARG (exp
, 2);
3040 return expand_builtin_mempcpy_args (dest
, src
, len
,
3041 target
, mode
, /*endp=*/ 1,
3046 /* Expand an instrumented call EXP to the mempcpy builtin.
3047 Return NULL_RTX if we failed, the caller should emit a normal call,
3048 otherwise try to get the result in TARGET, if convenient (and in
3049 mode MODE if that's convenient). */
3052 expand_builtin_mempcpy_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3054 if (!validate_arglist (exp
,
3055 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3056 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3057 INTEGER_TYPE
, VOID_TYPE
))
3061 tree dest
= CALL_EXPR_ARG (exp
, 0);
3062 tree src
= CALL_EXPR_ARG (exp
, 2);
3063 tree len
= CALL_EXPR_ARG (exp
, 4);
3064 rtx res
= expand_builtin_mempcpy_args (dest
, src
, len
, target
,
3067 /* Return src bounds with the result. */
3070 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3071 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3072 res
= chkp_join_splitted_slot (res
, bnd
);
3078 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3079 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3080 so that this can also be called without constructing an actual CALL_EXPR.
3081 The other arguments and return value are the same as for
3082 expand_builtin_mempcpy. */
3085 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
,
3086 rtx target
, machine_mode mode
, int endp
,
3089 tree fndecl
= get_callee_fndecl (orig_exp
);
3091 /* If return value is ignored, transform mempcpy into memcpy. */
3092 if (target
== const0_rtx
3093 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3094 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
))
3096 tree fn
= builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP
);
3097 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3099 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3101 else if (target
== const0_rtx
3102 && builtin_decl_implicit_p (BUILT_IN_MEMCPY
))
3104 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3105 tree result
= build_call_nofold_loc (UNKNOWN_LOCATION
, fn
, 3,
3107 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3111 const char *src_str
;
3112 unsigned int src_align
= get_pointer_alignment (src
);
3113 unsigned int dest_align
= get_pointer_alignment (dest
);
3114 rtx dest_mem
, src_mem
, len_rtx
;
3116 /* If either SRC or DEST is not a pointer type, don't do this
3117 operation in-line. */
3118 if (dest_align
== 0 || src_align
== 0)
3121 /* If LEN is not constant, call the normal function. */
3122 if (! tree_fits_uhwi_p (len
))
3125 len_rtx
= expand_normal (len
);
3126 src_str
= c_getstr (src
);
  /* If SRC is a string constant and the block move would be done
     by pieces, we can avoid loading the string from memory
     and store only the computed constants.  */
3132 && CONST_INT_P (len_rtx
)
3133 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3134 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3135 CONST_CAST (char *, src_str
),
3138 dest_mem
= get_memory_rtx (dest
, len
);
3139 set_mem_align (dest_mem
, dest_align
);
3140 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3141 builtin_memcpy_read_str
,
3142 CONST_CAST (char *, src_str
),
3143 dest_align
, false, endp
);
3144 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3145 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3149 if (CONST_INT_P (len_rtx
)
3150 && can_move_by_pieces (INTVAL (len_rtx
),
3151 MIN (dest_align
, src_align
)))
3153 dest_mem
= get_memory_rtx (dest
, len
);
3154 set_mem_align (dest_mem
, dest_align
);
3155 src_mem
= get_memory_rtx (src
, len
);
3156 set_mem_align (src_mem
, src_align
);
3157 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3158 MIN (dest_align
, src_align
), endp
);
3159 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3160 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3168 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3169 we failed, the caller should emit a normal call, otherwise try to
3170 get the result in TARGET, if convenient. If ENDP is 0 return the
3171 destination pointer, if ENDP is 1 return the end pointer ala
3172 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3176 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3178 struct expand_operand ops
[3];
3182 if (!targetm
.have_movstr ())
3185 dest_mem
= get_memory_rtx (dest
, NULL
);
3186 src_mem
= get_memory_rtx (src
, NULL
);
3189 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3190 dest_mem
= replace_equiv_address (dest_mem
, target
);
3193 create_output_operand (&ops
[0], endp
? target
: NULL_RTX
, Pmode
);
3194 create_fixed_operand (&ops
[1], dest_mem
);
3195 create_fixed_operand (&ops
[2], src_mem
);
3196 if (!maybe_expand_insn (targetm
.code_for_movstr
, 3, ops
))
3199 if (endp
&& target
!= const0_rtx
)
3201 target
= ops
[0].value
;
3202 /* movstr is supposed to set end to the address of the NUL
3203 terminator. If the caller requested a mempcpy-like return value,
3207 rtx tem
= plus_constant (GET_MODE (target
),
3208 gen_lowpart (GET_MODE (target
), target
), 1);
3209 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3215 /* Expand expression EXP, which is a call to the strcpy builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
3217 try to get the result in TARGET, if convenient (and in mode MODE if that's
3221 expand_builtin_strcpy (tree exp
, rtx target
)
3223 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3225 tree dest
= CALL_EXPR_ARG (exp
, 0);
3226 tree src
= CALL_EXPR_ARG (exp
, 1);
3227 return expand_builtin_strcpy_args (dest
, src
, target
);
3232 /* Helper function to do the actual work for expand_builtin_strcpy. The
3233 arguments to the builtin_strcpy call DEST and SRC are broken out
3234 so that this can also be called without constructing an actual CALL_EXPR.
3235 The other arguments and return value are the same as for
3236 expand_builtin_strcpy. */
3239 expand_builtin_strcpy_args (tree dest
, tree src
, rtx target
)
3241 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3244 /* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
3246 otherwise try to get the result in TARGET, if convenient (and in
3247 mode MODE if that's convenient). */
3250 expand_builtin_stpcpy (tree exp
, rtx target
, machine_mode mode
)
3253 location_t loc
= EXPR_LOCATION (exp
);
3255 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3258 dst
= CALL_EXPR_ARG (exp
, 0);
3259 src
= CALL_EXPR_ARG (exp
, 1);
3261 /* If return value is ignored, transform stpcpy into strcpy. */
3262 if (target
== const0_rtx
&& builtin_decl_implicit (BUILT_IN_STRCPY
))
3264 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3265 tree result
= build_call_nofold_loc (loc
, fn
, 2, dst
, src
);
3266 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3273 /* Ensure we get an actual string whose length can be evaluated at
3274 compile-time, not an expression containing a string. This is
3275 because the latter will potentially produce pessimized code
3276 when used to produce the return value. */
3277 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3278 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3280 lenp1
= size_binop_loc (loc
, PLUS_EXPR
, len
, ssize_int (1));
3281 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
,
3282 target
, mode
, /*endp=*/2,
3288 if (TREE_CODE (len
) == INTEGER_CST
)
3290 rtx len_rtx
= expand_normal (len
);
3292 if (CONST_INT_P (len_rtx
))
3294 ret
= expand_builtin_strcpy_args (dst
, src
, target
);
3300 if (mode
!= VOIDmode
)
3301 target
= gen_reg_rtx (mode
);
3303 target
= gen_reg_rtx (GET_MODE (ret
));
3305 if (GET_MODE (target
) != GET_MODE (ret
))
3306 ret
= gen_lowpart (GET_MODE (target
), ret
);
3308 ret
= plus_constant (GET_MODE (ret
), ret
, INTVAL (len_rtx
));
3309 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3317 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3321 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3322 bytes from constant string DATA + OFFSET and return it as target
3326 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3329 const char *str
= (const char *) data
;
3331 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3334 return c_readstr (str
+ offset
, mode
);
3337 /* Expand expression EXP, which is a call to the strncpy builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3341 expand_builtin_strncpy (tree exp
, rtx target
)
3343 location_t loc
= EXPR_LOCATION (exp
);
3345 if (validate_arglist (exp
,
3346 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3348 tree dest
= CALL_EXPR_ARG (exp
, 0);
3349 tree src
= CALL_EXPR_ARG (exp
, 1);
3350 tree len
= CALL_EXPR_ARG (exp
, 2);
3351 tree slen
= c_strlen (src
, 1);
3353 /* We must be passed a constant len and src parameter. */
3354 if (!tree_fits_uhwi_p (len
) || !slen
|| !tree_fits_uhwi_p (slen
))
3357 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
3359 /* We're required to pad with trailing zeros if the requested
3360 len is greater than strlen(s2)+1. In that case try to
         use store_by_pieces; if it fails, punt.  */
3362 if (tree_int_cst_lt (slen
, len
))
3364 unsigned int dest_align
= get_pointer_alignment (dest
);
3365 const char *p
= c_getstr (src
);
3368 if (!p
|| dest_align
== 0 || !tree_fits_uhwi_p (len
)
3369 || !can_store_by_pieces (tree_to_uhwi (len
),
3370 builtin_strncpy_read_str
,
3371 CONST_CAST (char *, p
),
3375 dest_mem
= get_memory_rtx (dest
, len
);
3376 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3377 builtin_strncpy_read_str
,
3378 CONST_CAST (char *, p
), dest_align
, false, 0);
3379 dest_mem
= force_operand (XEXP (dest_mem
, 0), target
);
3380 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */

static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  if (size == 1)
    return (rtx) data;

  p = XALLOCAVEC (char, size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
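
/* Concrete instance of the comment above: in SImode with a fill byte of
   0xab, the multiplication by the 0x01010101 coefficient leaves the returned
   register holding 0xabababab.  */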
3429 /* Expand expression EXP, which is a call to the memset builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
3431 try to get the result in TARGET, if convenient (and in mode MODE if that's
3435 expand_builtin_memset (tree exp
, rtx target
, machine_mode mode
)
3437 if (!validate_arglist (exp
,
3438 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3442 tree dest
= CALL_EXPR_ARG (exp
, 0);
3443 tree val
= CALL_EXPR_ARG (exp
, 1);
3444 tree len
= CALL_EXPR_ARG (exp
, 2);
3445 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3449 /* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3451 try to get the result in TARGET, if convenient (and in mode MODE if that's
3455 expand_builtin_memset_with_bounds (tree exp
, rtx target
, machine_mode mode
)
3457 if (!validate_arglist (exp
,
3458 POINTER_TYPE
, POINTER_BOUNDS_TYPE
,
3459 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3463 tree dest
= CALL_EXPR_ARG (exp
, 0);
3464 tree val
= CALL_EXPR_ARG (exp
, 2);
3465 tree len
= CALL_EXPR_ARG (exp
, 3);
3466 rtx res
= expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3468 /* Return src bounds with the result. */
3471 rtx bnd
= force_reg (targetm
.chkp_bound_mode (),
3472 expand_normal (CALL_EXPR_ARG (exp
, 1)));
3473 res
= chkp_join_splitted_slot (res
, bnd
);
3479 /* Helper function to do the actual work for expand_builtin_memset. The
3480 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3481 so that this can also be called without constructing an actual CALL_EXPR.
3482 The other arguments and return value are the same as for
3483 expand_builtin_memset. */
3486 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3487 rtx target
, machine_mode mode
, tree orig_exp
)
3490 enum built_in_function fcode
;
3491 machine_mode val_mode
;
3493 unsigned int dest_align
;
3494 rtx dest_mem
, dest_addr
, len_rtx
;
3495 HOST_WIDE_INT expected_size
= -1;
3496 unsigned int expected_align
= 0;
3497 unsigned HOST_WIDE_INT min_size
;
3498 unsigned HOST_WIDE_INT max_size
;
3499 unsigned HOST_WIDE_INT probable_max_size
;
3501 dest_align
= get_pointer_alignment (dest
);
3503 /* If DEST is not a pointer type, don't do this operation in-line. */
3504 if (dest_align
== 0)
3507 if (currently_expanding_gimple_stmt
)
3508 stringop_block_profile (currently_expanding_gimple_stmt
,
3509 &expected_align
, &expected_size
);
3511 if (expected_align
< dest_align
)
3512 expected_align
= dest_align
;
3514 /* If the LEN parameter is zero, return DEST. */
3515 if (integer_zerop (len
))
3517 /* Evaluate and ignore VAL in case it has side-effects. */
3518 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3519 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3522 /* Stabilize the arguments in case we fail. */
3523 dest
= builtin_save_expr (dest
);
3524 val
= builtin_save_expr (val
);
3525 len
= builtin_save_expr (len
);
3527 len_rtx
= expand_normal (len
);
3528 determine_block_size (len
, len_rtx
, &min_size
, &max_size
,
3529 &probable_max_size
);
3530 dest_mem
= get_memory_rtx (dest
, len
);
3531 val_mode
= TYPE_MODE (unsigned_char_type_node
);
3533 if (TREE_CODE (val
) != INTEGER_CST
)
3537 val_rtx
= expand_normal (val
);
3538 val_rtx
= convert_to_mode (val_mode
, val_rtx
, 0);
3540 /* Assume that we can memset by pieces if we can store
3541 * the coefficients by pieces (in the required modes).
3542 * We can't pass builtin_memset_gen_str as that emits RTL. */
3544 if (tree_fits_uhwi_p (len
)
3545 && can_store_by_pieces (tree_to_uhwi (len
),
3546 builtin_memset_read_str
, &c
, dest_align
,
3549 val_rtx
= force_reg (val_mode
, val_rtx
);
3550 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3551 builtin_memset_gen_str
, val_rtx
, dest_align
,
3554 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3555 dest_align
, expected_align
,
3556 expected_size
, min_size
, max_size
,
3560 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3561 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3565 if (target_char_cast (val
, &c
))
3570 if (tree_fits_uhwi_p (len
)
3571 && can_store_by_pieces (tree_to_uhwi (len
),
3572 builtin_memset_read_str
, &c
, dest_align
,
3574 store_by_pieces (dest_mem
, tree_to_uhwi (len
),
3575 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3576 else if (!set_storage_via_setmem (dest_mem
, len_rtx
,
3577 gen_int_mode (c
, val_mode
),
3578 dest_align
, expected_align
,
3579 expected_size
, min_size
, max_size
,
3583 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3584 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3588 set_mem_align (dest_mem
, dest_align
);
3589 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3590 CALL_EXPR_TAILCALL (orig_exp
)
3591 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3592 expected_align
, expected_size
,
3598 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3599 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3605 fndecl
= get_callee_fndecl (orig_exp
);
3606 fcode
= DECL_FUNCTION_CODE (fndecl
);
3607 if (fcode
== BUILT_IN_MEMSET
3608 || fcode
== BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP
)
3609 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 3,
3611 else if (fcode
== BUILT_IN_BZERO
)
3612 fn
= build_call_nofold_loc (EXPR_LOCATION (orig_exp
), fndecl
, 2,
3616 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3617 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3618 return expand_call (fn
, target
, target
== const0_rtx
);
3621 /* Expand expression EXP, which is a call to the bzero builtin. Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3625 expand_builtin_bzero (tree exp
)
3628 location_t loc
= EXPR_LOCATION (exp
);
3630 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3633 dest
= CALL_EXPR_ARG (exp
, 0);
3634 size
= CALL_EXPR_ARG (exp
, 1);
3636 /* New argument list transforming bzero(ptr x, int y) to
3637 memset(ptr x, int 0, size_t y). This is done this way
     so that if it isn't expanded inline, we fall back to
3639 calling bzero instead of memset. */
3641 return expand_builtin_memset_args (dest
, integer_zero_node
,
3642 fold_convert_loc (loc
,
3643 size_type_node
, size
),
3644 const0_rtx
, VOIDmode
, exp
);
3647 /* Try to expand cmpstr operation ICODE with the given operands.
3648 Return the result rtx on success, otherwise return null. */
3651 expand_cmpstr (insn_code icode
, rtx target
, rtx arg1_rtx
, rtx arg2_rtx
,
3652 HOST_WIDE_INT align
)
3654 machine_mode insn_mode
= insn_data
[icode
].operand
[0].mode
;
3656 if (target
&& (!REG_P (target
) || HARD_REGISTER_P (target
)))
3659 struct expand_operand ops
[4];
3660 create_output_operand (&ops
[0], target
, insn_mode
);
3661 create_fixed_operand (&ops
[1], arg1_rtx
);
3662 create_fixed_operand (&ops
[2], arg2_rtx
);
3663 create_integer_operand (&ops
[3], align
);
3664 if (maybe_expand_insn (icode
, 4, ops
))
3665 return ops
[0].value
;
3669 /* Expand expression EXP, which is a call to the memcmp built-in function.
3670 Return NULL_RTX if we failed and the caller should emit a normal call,
3671 otherwise try to get the result in TARGET, if convenient.
3672 RESULT_EQ is true if we can relax the returned value to be either zero
3673 or nonzero, without caring about the sign. */
3676 expand_builtin_memcmp (tree exp
, rtx target
, bool result_eq
)
3678 if (!validate_arglist (exp
,
3679 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3682 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3683 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3684 tree len
= CALL_EXPR_ARG (exp
, 2);
3685 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
3686 location_t loc
= EXPR_LOCATION (exp
);
3688 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3689 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3691 /* If we don't have POINTER_TYPE, call the function. */
3692 if (arg1_align
== 0 || arg2_align
== 0)
3695 rtx arg1_rtx
= get_memory_rtx (arg1
, len
);
3696 rtx arg2_rtx
= get_memory_rtx (arg2
, len
);
3697 rtx len_rtx
= expand_normal (fold_convert_loc (loc
, sizetype
, len
));
3699 /* Set MEM_SIZE as appropriate. */
3700 if (CONST_INT_P (len_rtx
))
3702 set_mem_size (arg1_rtx
, INTVAL (len_rtx
));
3703 set_mem_size (arg2_rtx
, INTVAL (len_rtx
));
3706 by_pieces_constfn constfn
= NULL
;
3708 const char *src_str
= c_getstr (arg2
);
3709 if (result_eq
&& src_str
== NULL
)
3711 src_str
= c_getstr (arg1
);
3712 if (src_str
!= NULL
)
3713 std::swap (arg1_rtx
, arg2_rtx
);
  /* If SRC is a string constant and the block move would be done
     by pieces, we can avoid loading the string from memory
     and store only the computed constants.  */
3720 && CONST_INT_P (len_rtx
)
3721 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1)
3722 constfn
= builtin_memcpy_read_str
;
3724 rtx result
= emit_block_cmp_hints (arg1_rtx
, arg2_rtx
, len_rtx
,
3725 TREE_TYPE (len
), target
,
3727 CONST_CAST (char *, src_str
));
3731 /* Return the value in the proper mode for this function. */
3732 if (GET_MODE (result
) == mode
)
3737 convert_move (target
, result
, 0);
3741 return convert_to_mode (mode
, result
, 0);
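
/* Illustrative note: RESULT_EQ is what lets a caller that only tests
   memcmp (a, b, n) == 0 use a cheaper block comparison, since
   emit_block_cmp_hints may then return any nonzero value for unequal blocks
   instead of the ordered negative/zero/positive result.  */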
3747 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
   if we failed; the caller should emit a normal call, otherwise try to get
3749 the result in TARGET, if convenient. */
3752 expand_builtin_strcmp (tree exp
, ATTRIBUTE_UNUSED rtx target
)
3754 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3757 insn_code cmpstr_icode
= direct_optab_handler (cmpstr_optab
, SImode
);
3758 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
3759 if (cmpstr_icode
!= CODE_FOR_nothing
|| cmpstrn_icode
!= CODE_FOR_nothing
)
3761 rtx arg1_rtx
, arg2_rtx
;
3763 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3764 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3765 rtx result
= NULL_RTX
;
3767 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3768 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3770 /* If we don't have POINTER_TYPE, call the function. */
3771 if (arg1_align
== 0 || arg2_align
== 0)
3774 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3775 arg1
= builtin_save_expr (arg1
);
3776 arg2
= builtin_save_expr (arg2
);
3778 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
3779 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
3781 /* Try to call cmpstrsi. */
3782 if (cmpstr_icode
!= CODE_FOR_nothing
)
3783 result
= expand_cmpstr (cmpstr_icode
, target
, arg1_rtx
, arg2_rtx
,
3784 MIN (arg1_align
, arg2_align
));
3786 /* Try to determine at least one length and call cmpstrnsi. */
3787 if (!result
&& cmpstrn_icode
!= CODE_FOR_nothing
)
3792 tree len1
= c_strlen (arg1
, 1);
3793 tree len2
= c_strlen (arg2
, 1);
3796 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3798 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3800 /* If we don't have a constant length for the first, use the length
3801 of the second, if we know it. We don't require a constant for
3802 this case; some cost analysis could be done if both are available
3803 but neither is constant. For now, assume they're equally cheap,
3804 unless one has side effects. If both strings have constant lengths,
3811 else if (TREE_SIDE_EFFECTS (len1
))
3813 else if (TREE_SIDE_EFFECTS (len2
))
3815 else if (TREE_CODE (len1
) != INTEGER_CST
)
3817 else if (TREE_CODE (len2
) != INTEGER_CST
)
3819 else if (tree_int_cst_lt (len1
, len2
))
3824 /* If both arguments have side effects, we cannot optimize. */
3825 if (len
&& !TREE_SIDE_EFFECTS (len
))
3827 arg3_rtx
= expand_normal (len
);
3828 result
= expand_cmpstrn_or_cmpmem
3829 (cmpstrn_icode
, target
, arg1_rtx
, arg2_rtx
, TREE_TYPE (len
),
3830 arg3_rtx
, MIN (arg1_align
, arg2_align
));
3836 /* Return the value in the proper mode for this function. */
3837 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
3838 if (GET_MODE (result
) == mode
)
3841 return convert_to_mode (mode
, result
, 0);
3842 convert_move (target
, result
, 0);
3846 /* Expand the library call ourselves using a stabilized argument
3847 list to avoid re-evaluating the function's arguments twice. */
3848 fndecl
= get_callee_fndecl (exp
);
3849 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 2, arg1
, arg2
);
3850 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3851 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
3852 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
3862 expand_builtin_strncmp (tree exp
, ATTRIBUTE_UNUSED rtx target
,
3863 ATTRIBUTE_UNUSED machine_mode mode
)
3865 location_t loc ATTRIBUTE_UNUSED
= EXPR_LOCATION (exp
);
3867 if (!validate_arglist (exp
,
3868 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3871 /* If c_strlen can determine an expression for one of the string
3872 lengths, and it doesn't have side effects, then emit cmpstrnsi
3873 using length MIN(strlen(string)+1, arg3). */
3874 insn_code cmpstrn_icode
= direct_optab_handler (cmpstrn_optab
, SImode
);
3875 if (cmpstrn_icode
!= CODE_FOR_nothing
)
3877 tree len
, len1
, len2
;
3878 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3881 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3882 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3883 tree arg3
= CALL_EXPR_ARG (exp
, 2);
3885 unsigned int arg1_align
= get_pointer_alignment (arg1
) / BITS_PER_UNIT
;
3886 unsigned int arg2_align
= get_pointer_alignment (arg2
) / BITS_PER_UNIT
;
3888 len1
= c_strlen (arg1
, 1);
3889 len2
= c_strlen (arg2
, 1);
3892 len1
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len1
);
3894 len2
= size_binop_loc (loc
, PLUS_EXPR
, ssize_int (1), len2
);
3896 /* If we don't have a constant length for the first, use the length
3897 of the second, if we know it. We don't require a constant for
3898 this case; some cost analysis could be done if both are available
3899 but neither is constant. For now, assume they're equally cheap,
3900 unless one has side effects. If both strings have constant lengths,
3907 else if (TREE_SIDE_EFFECTS (len1
))
3909 else if (TREE_SIDE_EFFECTS (len2
))
3911 else if (TREE_CODE (len1
) != INTEGER_CST
)
3913 else if (TREE_CODE (len2
) != INTEGER_CST
)
3915 else if (tree_int_cst_lt (len1
, len2
))
3920 /* If both arguments have side effects, we cannot optimize. */
3921 if (!len
|| TREE_SIDE_EFFECTS (len
))
3924 /* The actual new length parameter is MIN(len,arg3). */
3925 len
= fold_build2_loc (loc
, MIN_EXPR
, TREE_TYPE (len
), len
,
3926 fold_convert_loc (loc
, TREE_TYPE (len
), arg3
));
3928 /* If we don't have POINTER_TYPE, call the function. */
3929 if (arg1_align
== 0 || arg2_align
== 0)
3932 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3933 arg1
= builtin_save_expr (arg1
);
3934 arg2
= builtin_save_expr (arg2
);
3935 len
= builtin_save_expr (len
);
3937 arg1_rtx
= get_memory_rtx (arg1
, len
);
3938 arg2_rtx
= get_memory_rtx (arg2
, len
);
3939 arg3_rtx
= expand_normal (len
);
3940 result
= expand_cmpstrn_or_cmpmem (cmpstrn_icode
, target
, arg1_rtx
,
3941 arg2_rtx
, TREE_TYPE (len
), arg3_rtx
,
3942 MIN (arg1_align
, arg2_align
));
3945 /* Return the value in the proper mode for this function. */
3946 mode
= TYPE_MODE (TREE_TYPE (exp
));
3947 if (GET_MODE (result
) == mode
)
3950 return convert_to_mode (mode
, result
, 0);
3951 convert_move (target
, result
, 0);
3955 /* Expand the library call ourselves using a stabilized argument
3956 list to avoid re-evaluating the function's arguments twice. */
3957 fndecl
= get_callee_fndecl (exp
);
3958 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fndecl
, 3,
3960 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
3961 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
3962 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4018 /* Make it easier for the backends by protecting the valist argument
4019 from multiple evaluations. */
4022 stabilize_va_list_loc (location_t loc
, tree valist
, int needs_lvalue
)
4024 tree vatype
= targetm
.canonical_va_list_type (TREE_TYPE (valist
));
4026 /* The current way of determining the type of valist is completely
4027 bogus. We should have the information on the va builtin instead. */
4029 vatype
= targetm
.fn_abi_va_list (cfun
->decl
);
4031 if (TREE_CODE (vatype
) == ARRAY_TYPE
)
4033 if (TREE_SIDE_EFFECTS (valist
))
4034 valist
= save_expr (valist
);
4036 /* For this case, the backends will be expecting a pointer to
4037 vatype, but it's possible we've actually been given an array
4038 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4040 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4042 tree p1
= build_pointer_type (TREE_TYPE (vatype
));
4043 valist
= build_fold_addr_expr_with_type_loc (loc
, valist
, p1
);
4048 tree pt
= build_pointer_type (vatype
);
4052 if (! TREE_SIDE_EFFECTS (valist
))
4055 valist
= fold_build1_loc (loc
, ADDR_EXPR
, pt
, valist
);
4056 TREE_SIDE_EFFECTS (valist
) = 1;
4059 if (TREE_SIDE_EFFECTS (valist
))
4060 valist
= save_expr (valist
);
4061 valist
= fold_build2_loc (loc
, MEM_REF
,
4062 vatype
, valist
, build_int_cst (pt
, 0));
/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4084 /* The "standard" type of va_list is va_list_type_node. */
4087 std_canonical_va_list_type (tree type
)
4091 wtype
= va_list_type_node
;
4094 if (TREE_CODE (wtype
) == ARRAY_TYPE
)
4096 /* If va_list is an array type, the argument may have decayed
4097 to a pointer type, e.g. by being passed to another function.
4098 In that case, unwrap both types so that we can compare the
4099 underlying records. */
4100 if (TREE_CODE (htype
) == ARRAY_TYPE
4101 || POINTER_TYPE_P (htype
))
4103 wtype
= TREE_TYPE (wtype
);
4104 htype
= TREE_TYPE (htype
);
4107 if (TYPE_MAIN_VARIANT (wtype
) == TYPE_MAIN_VARIANT (htype
))
4108 return va_list_type_node
;
4113 /* The "standard" implementation of va_start: just assign `nextarg' to
4117 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4119 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4120 convert_move (va_r
, nextarg
, 0);
4122 /* We do not have any valid bounds for the pointer, so
4123 just store zero bounds for it. */
4124 if (chkp_function_instrumented_p (current_function_decl
))
4125 chkp_expand_bounds_reset_for_mem (valist
,
4126 make_tree (TREE_TYPE (valist
),
4130 /* Expand EXP, a call to __builtin_va_start. */
4133 expand_builtin_va_start (tree exp
)
4137 location_t loc
= EXPR_LOCATION (exp
);
4139 if (call_expr_nargs (exp
) < 2)
4141 error_at (loc
, "too few arguments to function %<va_start%>");
4145 if (fold_builtin_next_arg (exp
, true))
4148 nextarg
= expand_builtin_next_arg ();
4149 valist
= stabilize_va_list_loc (loc
, CALL_EXPR_ARG (exp
, 0), 1);
4151 if (targetm
.expand_builtin_va_start
)
4152 targetm
.expand_builtin_va_start (valist
, nextarg
);
4154 std_expand_builtin_va_start (valist
, nextarg
);
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
4174 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4175 builtin rather than just as an assignment in stdarg.h because of the
4176 nastiness of array-type va_list types. */
4179 expand_builtin_va_copy (tree exp
)
4182 location_t loc
= EXPR_LOCATION (exp
);
4184 dst
= CALL_EXPR_ARG (exp
, 0);
4185 src
= CALL_EXPR_ARG (exp
, 1);
4187 dst
= stabilize_va_list_loc (loc
, dst
, 1);
4188 src
= stabilize_va_list_loc (loc
, src
, 0);
4190 gcc_assert (cfun
!= NULL
&& cfun
->decl
!= NULL_TREE
);
4192 if (TREE_CODE (targetm
.fn_abi_va_list (cfun
->decl
)) != ARRAY_TYPE
)
4194 t
= build2 (MODIFY_EXPR
, targetm
.fn_abi_va_list (cfun
->decl
), dst
, src
);
4195 TREE_SIDE_EFFECTS (t
) = 1;
4196 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4200 rtx dstb
, srcb
, size
;
4202 /* Evaluate to pointers. */
4203 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4204 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4205 size
= expand_expr (TYPE_SIZE_UNIT (targetm
.fn_abi_va_list (cfun
->decl
)),
4206 NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
4208 dstb
= convert_memory_address (Pmode
, dstb
);
4209 srcb
= convert_memory_address (Pmode
, srcb
);
4211 /* "Dereference" to BLKmode memories. */
4212 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4213 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4214 set_mem_align (dstb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4215 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4216 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4217 set_mem_align (srcb
, TYPE_ALIGN (targetm
.fn_abi_va_list (cfun
->decl
)));
4220 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4226 /* Expand a call to one of the builtin functions __builtin_frame_address or
4227 __builtin_return_address. */
4230 expand_builtin_frame_address (tree fndecl
, tree exp
)
4232 /* The argument must be a nonnegative integer constant.
4233 It counts the number of frames to scan up the stack.
4234 The value is either the frame pointer value or the return
4235 address saved in that frame. */
4236 if (call_expr_nargs (exp
) == 0)
4237 /* Warning about missing arg was already issued. */
4239 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp
, 0)))
4241 error ("invalid argument to %qD", fndecl
);
4246 /* Number of frames to scan up the stack. */
4247 unsigned HOST_WIDE_INT count
= tree_to_uhwi (CALL_EXPR_ARG (exp
, 0));
4249 rtx tem
= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
), count
);
4251 /* Some ports cannot access arbitrary stack frames. */
4254 warning (0, "unsupported argument to %qD", fndecl
);
4260 /* Warn since no effort is made to ensure that any frame
4261 beyond the current one exists or can be safely reached. */
4262 warning (OPT_Wframe_address
, "calling %qD with "
4263 "a nonzero argument is unsafe", fndecl
);
4266 /* For __builtin_frame_address, return what we've got. */
4267 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4271 && ! CONSTANT_P (tem
))
4272 tem
= copy_addr_to_reg (tem
);
4277 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4278 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4279 is the same as for allocate_dynamic_stack_space. */
4282 expand_builtin_alloca (tree exp
, bool cannot_accumulate
)
4288 bool alloca_with_align
= (DECL_FUNCTION_CODE (get_callee_fndecl (exp
))
4289 == BUILT_IN_ALLOCA_WITH_ALIGN
);
4292 = (alloca_with_align
4293 ? validate_arglist (exp
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
4294 : validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
));
4299 /* Compute the argument. */
4300 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4302 /* Compute the alignment. */
4303 align
= (alloca_with_align
4304 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp
, 1))
4305 : BIGGEST_ALIGNMENT
);
4307 /* Allocate the desired space. */
4308 result
= allocate_dynamic_stack_space (op0
, 0, align
, cannot_accumulate
);
4309 result
= convert_memory_address (ptr_mode
, result
);
4314 /* Expand a call to bswap builtin in EXP.
4315 Return NULL_RTX if a normal call should be emitted rather than expanding the
4316 function in-line. If convenient, the result should be placed in TARGET.
4317 SUBTARGET may be used as the target for computing one of EXP's operands. */
4320 expand_builtin_bswap (machine_mode target_mode
, tree exp
, rtx target
,
4326 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4329 arg
= CALL_EXPR_ARG (exp
, 0);
4330 op0
= expand_expr (arg
,
4331 subtarget
&& GET_MODE (subtarget
) == target_mode
4332 ? subtarget
: NULL_RTX
,
4333 target_mode
, EXPAND_NORMAL
);
4334 if (GET_MODE (op0
) != target_mode
)
4335 op0
= convert_to_mode (target_mode
, op0
, 1);
4337 target
= expand_unop (target_mode
, bswap_optab
, op0
, target
, 1);
4339 gcc_assert (target
);
4341 return convert_to_mode (target_mode
, target
, 1);
4344 /* Expand a call to a unary builtin in EXP.
4345 Return NULL_RTX if a normal call should be emitted rather than expanding the
4346 function in-line. If convenient, the result should be placed in TARGET.
4347 SUBTARGET may be used as the target for computing one of EXP's operands. */
4350 expand_builtin_unop (machine_mode target_mode
, tree exp
, rtx target
,
4351 rtx subtarget
, optab op_optab
)
4355 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4358 /* Compute the argument. */
4359 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0),
4361 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0)))
4362 == GET_MODE (subtarget
))) ? subtarget
: NULL_RTX
,
4363 VOIDmode
, EXPAND_NORMAL
);
4364 /* Compute op, into TARGET if possible.
4365 Set TARGET to wherever the result comes back. */
4366 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4367 op_optab
, op0
, target
, op_optab
!= clrsb_optab
);
4368 gcc_assert (target
);
4370 return convert_to_mode (target_mode
, target
, 0);
4373 /* Expand a call to __builtin_expect. We just return our argument
4374 as the builtin_expect semantic should've been already executed by
4375 tree branch prediction pass. */
4378 expand_builtin_expect (tree exp
, rtx target
)
4382 if (call_expr_nargs (exp
) < 2)
4384 arg
= CALL_EXPR_ARG (exp
, 0);
4386 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4387 /* When guessing was done, the hints should be already stripped away. */
4388 gcc_assert (!flag_guess_branch_prob
4389 || optimize
== 0 || seen_error ());
4393 /* Expand a call to __builtin_assume_aligned. We just return our first
4394 argument as the builtin_assume_aligned semantic should've been already
4398 expand_builtin_assume_aligned (tree exp
, rtx target
)
4400 if (call_expr_nargs (exp
) < 2)
4402 target
= expand_expr (CALL_EXPR_ARG (exp
, 0), target
, VOIDmode
,
4404 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 1))
4405 && (call_expr_nargs (exp
) < 3
4406 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp
, 2))));
static void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}
4489 /* Expand a call to __builtin___clear_cache. */
4492 expand_builtin___clear_cache (tree exp
)
4494 if (!targetm
.code_for_clear_cache
)
4496 #ifdef CLEAR_INSN_CACHE
4497 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4498 does something. Just do the default expansion to a call to
4502 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4503 does nothing. There is no need to call it. Do nothing. */
4505 #endif /* CLEAR_INSN_CACHE */
4508 /* We have a "clear_cache" insn, and it will handle everything. */
4510 rtx begin_rtx
, end_rtx
;
4512 /* We must not expand to a library call. If we did, any
4513 fallback library function in libgcc that might contain a call to
4514 __builtin___clear_cache() would recurse infinitely. */
4515 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4517 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4521 if (targetm
.have_clear_cache ())
4523 struct expand_operand ops
[2];
4525 begin
= CALL_EXPR_ARG (exp
, 0);
4526 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4528 end
= CALL_EXPR_ARG (exp
, 1);
4529 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4531 create_address_operand (&ops
[0], begin_rtx
);
4532 create_address_operand (&ops
[1], end_rtx
);
4533 if (maybe_expand_insn (targetm
.code_for_clear_cache
, 2, ops
))
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
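/* The RTL emitted above is the usual round-up-to-a-power-of-two idiom.
   As a plain C sketch (illustration only, assuming ALIGN is a power of two
   expressed in bytes):

     static inline unsigned long
     round_up_to (unsigned long addr, unsigned long align)
     {
       return (addr + align - 1) & -align;
     }

   which matches the PLUS with (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1)
   followed by the AND with the negated byte alignment.  */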
4565 expand_builtin_init_trampoline (tree exp
, bool onstack
)
4567 tree t_tramp
, t_func
, t_chain
;
4568 rtx m_tramp
, r_tramp
, r_chain
, tmp
;
4570 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
4571 POINTER_TYPE
, VOID_TYPE
))
4574 t_tramp
= CALL_EXPR_ARG (exp
, 0);
4575 t_func
= CALL_EXPR_ARG (exp
, 1);
4576 t_chain
= CALL_EXPR_ARG (exp
, 2);
4578 r_tramp
= expand_normal (t_tramp
);
4579 m_tramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
4580 MEM_NOTRAP_P (m_tramp
) = 1;
4582 /* If ONSTACK, the TRAMP argument should be the address of a field
4583 within the local function's FRAME decl. Either way, let's see if
4584 we can fill in the MEM_ATTRs for this memory. */
4585 if (TREE_CODE (t_tramp
) == ADDR_EXPR
)
4586 set_mem_attributes (m_tramp
, TREE_OPERAND (t_tramp
, 0), true);
4588 /* Creator of a heap trampoline is responsible for making sure the
4589 address is aligned to at least STACK_BOUNDARY. Normally malloc
4590 will ensure this anyhow. */
4591 tmp
= round_trampoline_addr (r_tramp
);
4594 m_tramp
= change_address (m_tramp
, BLKmode
, tmp
);
4595 set_mem_align (m_tramp
, TRAMPOLINE_ALIGNMENT
);
4596 set_mem_size (m_tramp
, TRAMPOLINE_SIZE
);
4599 /* The FUNC argument should be the address of the nested function.
4600 Extract the actual function decl to pass to the hook. */
4601 gcc_assert (TREE_CODE (t_func
) == ADDR_EXPR
);
4602 t_func
= TREE_OPERAND (t_func
, 0);
4603 gcc_assert (TREE_CODE (t_func
) == FUNCTION_DECL
);
4605 r_chain
= expand_normal (t_chain
);
4607 /* Generate insns to initialize the trampoline. */
4608 targetm
.calls
.trampoline_init (m_tramp
, t_func
, r_chain
);
4612 trampolines_created
= 1;
4614 warning_at (DECL_SOURCE_LOCATION (t_func
), OPT_Wtrampolines
,
4615 "trampoline generated for nested function %qD", t_func
);
4622 expand_builtin_adjust_trampoline (tree exp
)
4626 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
4629 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4630 tramp
= round_trampoline_addr (tramp
);
4631 if (targetm
.calls
.trampoline_adjust_address
)
4632 tramp
= targetm
.calls
.trampoline_adjust_address (tramp
);
4637 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4638 function. The function first checks whether the back end provides
4639 an insn to implement signbit for the respective mode. If not, it
4640 checks whether the floating point format of the value is such that
4641 the sign bit can be extracted. If that is not the case, error out.
4642 EXP is the expression that is a call to the builtin function; if
4643 convenient, the result should be placed in TARGET. */
4645 expand_builtin_signbit (tree exp
, rtx target
)
4647 const struct real_format
*fmt
;
4648 machine_mode fmode
, imode
, rmode
;
4651 enum insn_code icode
;
4653 location_t loc
= EXPR_LOCATION (exp
);
4655 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
4658 arg
= CALL_EXPR_ARG (exp
, 0);
4659 fmode
= TYPE_MODE (TREE_TYPE (arg
));
4660 rmode
= TYPE_MODE (TREE_TYPE (exp
));
4661 fmt
= REAL_MODE_FORMAT (fmode
);
4663 arg
= builtin_save_expr (arg
);
4665 /* Expand the argument yielding a RTX expression. */
4666 temp
= expand_normal (arg
);
4668 /* Check if the back end provides an insn that handles signbit for the
4670 icode
= optab_handler (signbit_optab
, fmode
);
4671 if (icode
!= CODE_FOR_nothing
)
4673 rtx_insn
*last
= get_last_insn ();
4674 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
4675 if (maybe_emit_unop_insn (icode
, target
, temp
, UNKNOWN
))
4677 delete_insns_since (last
);
4680 /* For floating point formats without a sign bit, implement signbit
4682 bitpos
= fmt
->signbit_ro
;
4685 /* But we can't do this if the format supports signed zero. */
4686 gcc_assert (!fmt
->has_signed_zero
|| !HONOR_SIGNED_ZEROS (fmode
));
4688 arg
= fold_build2_loc (loc
, LT_EXPR
, TREE_TYPE (exp
), arg
,
4689 build_real (TREE_TYPE (arg
), dconst0
));
4690 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
4693 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
4695 imode
= int_mode_for_mode (fmode
);
4696 gcc_assert (imode
!= BLKmode
);
4697 temp
= gen_lowpart (imode
, temp
);
4702 /* Handle targets with different FP word orders. */
4703 if (FLOAT_WORDS_BIG_ENDIAN
)
4704 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
4706 word
= bitpos
/ BITS_PER_WORD
;
4707 temp
= operand_subword_force (temp
, word
, fmode
);
4708 bitpos
= bitpos
% BITS_PER_WORD
;
4711 /* Force the intermediate word_mode (or narrower) result into a
4712 register. This avoids attempting to create paradoxical SUBREGs
4713 of floating point modes below. */
4714 temp
= force_reg (imode
, temp
);
4716 /* If the bitpos is within the "result mode" lowpart, the operation
4717 can be implement with a single bitwise AND. Otherwise, we need
4718 a right shift and an AND. */
4720 if (bitpos
< GET_MODE_BITSIZE (rmode
))
4722 wide_int mask
= wi::set_bit_in_zero (bitpos
, GET_MODE_PRECISION (rmode
));
4724 if (GET_MODE_SIZE (imode
) > GET_MODE_SIZE (rmode
))
4725 temp
= gen_lowpart (rmode
, temp
);
4726 temp
= expand_binop (rmode
, and_optab
, temp
,
4727 immed_wide_int_const (mask
, rmode
),
4728 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
4732 /* Perform a logical right shift to place the signbit in the least
4733 significant bit, then truncate the result to the desired mode
4734 and mask just this bit. */
4735 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
, bitpos
, NULL_RTX
, 1);
4736 temp
= gen_lowpart (rmode
, temp
);
4737 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
4738 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
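/* The fallback path above extracts the sign bit by hand.  A C sketch of the
   same idea for IEEE single precision (illustration only, not the code GCC
   emits):

     #include <stdint.h>
     #include <string.h>

     static inline int
     my_signbitf (float x)
     {
       uint32_t bits;
       memcpy (&bits, &x, sizeof bits);
       return (bits >> 31) & 1;
     }

   i.e. reinterpret the value in an integer mode, shift the sign bit down
   to bit 0 and mask it, just as the RSHIFT_EXPR/AND expansion does.  */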
4744 /* Expand fork or exec calls. TARGET is the desired target of the
4745 call. EXP is the call. FN is the
4746 identificator of the actual function. IGNORE is nonzero if the
4747 value is to be ignored. */
4750 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
4755 /* If we are not profiling, just call the function. */
4756 if (!profile_arc_flag
)
4759 /* Otherwise call the wrapper. This should be equivalent for the rest of
4760 compiler, so the code does not diverge, and the wrapper may run the
4761 code necessary for keeping the profiling sane. */
4763 switch (DECL_FUNCTION_CODE (fn
))
4766 id
= get_identifier ("__gcov_fork");
4769 case BUILT_IN_EXECL
:
4770 id
= get_identifier ("__gcov_execl");
4773 case BUILT_IN_EXECV
:
4774 id
= get_identifier ("__gcov_execv");
4777 case BUILT_IN_EXECLP
:
4778 id
= get_identifier ("__gcov_execlp");
4781 case BUILT_IN_EXECLE
:
4782 id
= get_identifier ("__gcov_execle");
4785 case BUILT_IN_EXECVP
:
4786 id
= get_identifier ("__gcov_execvp");
4789 case BUILT_IN_EXECVE
:
4790 id
= get_identifier ("__gcov_execve");
4797 decl
= build_decl (DECL_SOURCE_LOCATION (fn
),
4798 FUNCTION_DECL
, id
, TREE_TYPE (fn
));
4799 DECL_EXTERNAL (decl
) = 1;
4800 TREE_PUBLIC (decl
) = 1;
4801 DECL_ARTIFICIAL (decl
) = 1;
4802 TREE_NOTHROW (decl
) = 1;
4803 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
4804 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
4805 call
= rewrite_call_expr (EXPR_LOCATION (exp
), exp
, 0, decl
, 0);
4806 return expand_call (call
, target
, ignore
);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
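/* Worked example (illustration only): the byte size recovered from
   FCODE_DIFF is simply 1 << fcode_diff, so on a target with 8-bit units

     get_builtin_sync_mode (0)  yields the  8-bit integer mode (QImode)
     get_builtin_sync_mode (2)  yields the 32-bit integer mode (SImode)
     get_builtin_sync_mode (4)  yields the 128-bit integer mode (TImode)

   because BITS_PER_UNIT << fcode_diff is 8, 32 and 128 bits respectively.  */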
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);

  return val;
}
4874 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4875 EXP is the CALL_EXPR. CODE is the rtx code
4876 that corresponds to the arithmetic or logical operation from the name;
4877 an exception here is that NOT actually means NAND. TARGET is an optional
4878 place for us to store the results; AFTER is true if this is the
4879 fetch_and_xxx form. */
4882 expand_builtin_sync_operation (machine_mode mode
, tree exp
,
4883 enum rtx_code code
, bool after
,
4887 location_t loc
= EXPR_LOCATION (exp
);
4889 if (code
== NOT
&& warn_sync_nand
)
4891 tree fndecl
= get_callee_fndecl (exp
);
4892 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
4894 static bool warned_f_a_n
, warned_n_a_f
;
4898 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
4899 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
4900 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
4901 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
4902 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
4906 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N
);
4907 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
4908 warned_f_a_n
= true;
4911 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
4912 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
4913 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
4914 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
4915 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
4919 fndecl
= builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N
);
4920 inform (loc
, "%qD changed semantics in GCC 4.4", fndecl
);
4921 warned_n_a_f
= true;
4929 /* Expand the operands. */
4930 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
4931 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
4933 return expand_atomic_fetch_op (target
, mem
, val
, code
, MEMMODEL_SYNC_SEQ_CST
,
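/* The "changed semantics" warned about above refers to the NAND builtins.
   Sketch of the two behaviours (illustration only, per the GCC manual):

     GCC <= 4.3:  tmp = ~*ptr & val;    then  *ptr = tmp;
     GCC >= 4.4:  tmp = ~(*ptr & val);  then  *ptr = tmp;

   so __sync_fetch_and_nand and __sync_nand_and_fetch now compute a true
   bitwise NAND of the old memory contents and VAL.  */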
4937 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4938 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4939 true if this is the boolean form. TARGET is a place for us to store the
4940 results; this is NOT optional if IS_BOOL is true. */
4943 expand_builtin_compare_and_swap (machine_mode mode
, tree exp
,
4944 bool is_bool
, rtx target
)
4946 rtx old_val
, new_val
, mem
;
4949 /* Expand the operands. */
4950 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
4951 old_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
4952 new_val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
4954 pbool
= poval
= NULL
;
4955 if (target
!= const0_rtx
)
4962 if (!expand_atomic_compare_and_swap (pbool
, poval
, mem
, old_val
, new_val
,
4963 false, MEMMODEL_SYNC_SEQ_CST
,
4964 MEMMODEL_SYNC_SEQ_CST
))
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
				       rtx target)
{
  rtx val, mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_sync_lock_test_and_set (target, mem, val);
}

/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_sync_lock_release (machine_mode mode, tree exp)
{
  rtx mem;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
}
5002 /* Given an integer representing an ``enum memmodel'', verify its
5003 correctness and return the memory model enum. */
5005 static enum memmodel
5006 get_memmodel (tree exp
)
5009 unsigned HOST_WIDE_INT val
;
5011 = expansion_point_location_if_in_system_header (input_location
);
5013 /* If the parameter is not a constant, it's a run time value so we'll just
5014 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5015 if (TREE_CODE (exp
) != INTEGER_CST
)
5016 return MEMMODEL_SEQ_CST
;
5018 op
= expand_normal (exp
);
5021 if (targetm
.memmodel_check
)
5022 val
= targetm
.memmodel_check (val
);
5023 else if (val
& ~MEMMODEL_MASK
)
5025 warning_at (loc
, OPT_Winvalid_memory_model
,
5026 "unknown architecture specifier in memory model to builtin");
5027 return MEMMODEL_SEQ_CST
;
  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
5031 if (memmodel_base (val
) >= MEMMODEL_LAST
)
5033 warning_at (loc
, OPT_Winvalid_memory_model
,
5034 "invalid memory model argument to builtin");
5035 return MEMMODEL_SEQ_CST
;
5038 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5039 be conservative and promote consume to acquire. */
5040 if (val
== MEMMODEL_CONSUME
)
5041 val
= MEMMODEL_ACQUIRE
;
5043 return (enum memmodel
) val
;
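/* Caller-side sketch of the sanitizing above (illustration only): a
   non-constant or out-of-range model argument is silently treated as
   __ATOMIC_SEQ_CST, and __ATOMIC_CONSUME is promoted to __ATOMIC_ACQUIRE
   (the PR59448 workaround).

     int load_runtime_order (int *p, int order)
     {
       return __atomic_load_n (p, order);
     }

     int load_consume (int *p)
     {
       return __atomic_load_n (p, __ATOMIC_CONSUME);
     }

   The first call falls back to sequential consistency because ORDER is not
   an INTEGER_CST; the second is expanded as an acquire load.  */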
5046 /* Expand the __atomic_exchange intrinsic:
5047 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5048 EXP is the CALL_EXPR.
5049 TARGET is an optional place for us to store the results. */
5052 expand_builtin_atomic_exchange (machine_mode mode
, tree exp
, rtx target
)
5055 enum memmodel model
;
5057 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5059 if (!flag_inline_atomics
)
5062 /* Expand the operands. */
5063 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5064 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5066 return expand_atomic_exchange (target
, mem
, val
, model
);
5069 /* Expand the __atomic_compare_exchange intrinsic:
5070 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5071 TYPE desired, BOOL weak,
5072 enum memmodel success,
5073 enum memmodel failure)
5074 EXP is the CALL_EXPR.
5075 TARGET is an optional place for us to store the results. */
5078 expand_builtin_atomic_compare_exchange (machine_mode mode
, tree exp
,
5081 rtx expect
, desired
, mem
, oldval
;
5082 rtx_code_label
*label
;
5083 enum memmodel success
, failure
;
5087 = expansion_point_location_if_in_system_header (input_location
);
5089 success
= get_memmodel (CALL_EXPR_ARG (exp
, 4));
5090 failure
= get_memmodel (CALL_EXPR_ARG (exp
, 5));
5092 if (failure
> success
)
5094 warning_at (loc
, OPT_Winvalid_memory_model
,
5095 "failure memory model cannot be stronger than success "
5096 "memory model for %<__atomic_compare_exchange%>");
5097 success
= MEMMODEL_SEQ_CST
;
5100 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
5102 warning_at (loc
, OPT_Winvalid_memory_model
,
5103 "invalid failure memory model for "
5104 "%<__atomic_compare_exchange%>");
5105 failure
= MEMMODEL_SEQ_CST
;
5106 success
= MEMMODEL_SEQ_CST
;
5110 if (!flag_inline_atomics
)
5113 /* Expand the operands. */
5114 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5116 expect
= expand_normal (CALL_EXPR_ARG (exp
, 1));
5117 expect
= convert_memory_address (Pmode
, expect
);
5118 expect
= gen_rtx_MEM (mode
, expect
);
5119 desired
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 2), mode
);
5121 weak
= CALL_EXPR_ARG (exp
, 3);
5123 if (tree_fits_shwi_p (weak
) && tree_to_shwi (weak
) != 0)
5126 if (target
== const0_rtx
)
  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
5133 if (!expand_atomic_compare_and_swap (&target
, &oldval
, mem
, expect
, desired
,
5134 is_weak
, success
, failure
))
5137 /* Conditionally store back to EXPECT, lest we create a race condition
5138 with an improper store to memory. */
5139 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5140 the normal case where EXPECT is totally private, i.e. a register. At
5141 which point the store can be unconditional. */
5142 label
= gen_label_rtx ();
5143 emit_cmp_and_jump_insns (target
, const0_rtx
, NE
, NULL
,
5144 GET_MODE (target
), 1, label
);
5145 emit_move_insn (expect
, oldval
);
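/* Caller-side sketch (illustration only) of the constraint diagnosed above:
   the failure model may not be RELEASE or ACQ_REL and may not be stronger
   than the success model.

     _Bool try_set (int *p, int *expected, int desired)
     {
       return __atomic_compare_exchange_n (p, expected, desired, 0,
					   __ATOMIC_ACQ_REL,
					   __ATOMIC_ACQUIRE);
     }

   Passing, say, __ATOMIC_SEQ_CST as the failure model together with
   __ATOMIC_ACQUIRE as the success model would trigger the warning and both
   models would be forced to sequential consistency.  */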
5151 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5152 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5153 call. The weak parameter must be dropped to match the expected parameter
5154 list and the expected argument changed from value to pointer to memory
5158 expand_ifn_atomic_compare_exchange_into_call (gcall
*call
, machine_mode mode
)
5161 vec
<tree
, va_gc
> *vec
;
5164 vec
->quick_push (gimple_call_arg (call
, 0));
5165 tree expected
= gimple_call_arg (call
, 1);
5166 rtx x
= assign_stack_temp_for_type (mode
, GET_MODE_SIZE (mode
),
5167 TREE_TYPE (expected
));
5168 rtx expd
= expand_expr (expected
, x
, mode
, EXPAND_NORMAL
);
5170 emit_move_insn (x
, expd
);
5171 tree v
= make_tree (TREE_TYPE (expected
), x
);
5172 vec
->quick_push (build1 (ADDR_EXPR
,
5173 build_pointer_type (TREE_TYPE (expected
)), v
));
5174 vec
->quick_push (gimple_call_arg (call
, 2));
5175 /* Skip the boolean weak parameter. */
5176 for (z
= 4; z
< 6; z
++)
5177 vec
->quick_push (gimple_call_arg (call
, z
));
5178 built_in_function fncode
5179 = (built_in_function
) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5180 + exact_log2 (GET_MODE_SIZE (mode
)));
5181 tree fndecl
= builtin_decl_explicit (fncode
);
5182 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fndecl
)),
5184 tree exp
= build_call_vec (boolean_type_node
, fn
, vec
);
5185 tree lhs
= gimple_call_lhs (call
);
5186 rtx boolret
= expand_call (exp
, NULL_RTX
, lhs
== NULL_TREE
);
5189 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5190 if (GET_MODE (boolret
) != mode
)
5191 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
5192 x
= force_reg (mode
, x
);
5193 write_complex_part (target
, boolret
, true);
5194 write_complex_part (target
, x
, false);
5198 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5201 expand_ifn_atomic_compare_exchange (gcall
*call
)
5203 int size
= tree_to_shwi (gimple_call_arg (call
, 3)) & 255;
5204 gcc_assert (size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16);
5205 machine_mode mode
= mode_for_size (BITS_PER_UNIT
* size
, MODE_INT
, 0);
5206 rtx expect
, desired
, mem
, oldval
, boolret
;
5207 enum memmodel success
, failure
;
5211 = expansion_point_location_if_in_system_header (gimple_location (call
));
5213 success
= get_memmodel (gimple_call_arg (call
, 4));
5214 failure
= get_memmodel (gimple_call_arg (call
, 5));
5216 if (failure
> success
)
5218 warning_at (loc
, OPT_Winvalid_memory_model
,
5219 "failure memory model cannot be stronger than success "
5220 "memory model for %<__atomic_compare_exchange%>");
5221 success
= MEMMODEL_SEQ_CST
;
5224 if (is_mm_release (failure
) || is_mm_acq_rel (failure
))
5226 warning_at (loc
, OPT_Winvalid_memory_model
,
5227 "invalid failure memory model for "
5228 "%<__atomic_compare_exchange%>");
5229 failure
= MEMMODEL_SEQ_CST
;
5230 success
= MEMMODEL_SEQ_CST
;
5233 if (!flag_inline_atomics
)
5235 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
5239 /* Expand the operands. */
5240 mem
= get_builtin_sync_mem (gimple_call_arg (call
, 0), mode
);
5242 expect
= expand_expr_force_mode (gimple_call_arg (call
, 1), mode
);
5243 desired
= expand_expr_force_mode (gimple_call_arg (call
, 2), mode
);
5245 is_weak
= (tree_to_shwi (gimple_call_arg (call
, 3)) & 256) != 0;
5250 if (!expand_atomic_compare_and_swap (&boolret
, &oldval
, mem
, expect
, desired
,
5251 is_weak
, success
, failure
))
5253 expand_ifn_atomic_compare_exchange_into_call (call
, mode
);
5257 lhs
= gimple_call_lhs (call
);
5260 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5261 if (GET_MODE (boolret
) != mode
)
5262 boolret
= convert_modes (mode
, GET_MODE (boolret
), boolret
, 1);
5263 write_complex_part (target
, boolret
, true);
5264 write_complex_part (target
, oldval
, false);
5268 /* Expand the __atomic_load intrinsic:
5269 TYPE __atomic_load (TYPE *object, enum memmodel)
5270 EXP is the CALL_EXPR.
5271 TARGET is an optional place for us to store the results. */
5274 expand_builtin_atomic_load (machine_mode mode
, tree exp
, rtx target
)
5277 enum memmodel model
;
5279 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5280 if (is_mm_release (model
) || is_mm_acq_rel (model
))
5283 = expansion_point_location_if_in_system_header (input_location
);
5284 warning_at (loc
, OPT_Winvalid_memory_model
,
5285 "invalid memory model for %<__atomic_load%>");
5286 model
= MEMMODEL_SEQ_CST
;
5289 if (!flag_inline_atomics
)
5292 /* Expand the operand. */
5293 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5295 return expand_atomic_load (target
, mem
, model
);
5299 /* Expand the __atomic_store intrinsic:
5300 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5301 EXP is the CALL_EXPR.
5302 TARGET is an optional place for us to store the results. */
5305 expand_builtin_atomic_store (machine_mode mode
, tree exp
)
5308 enum memmodel model
;
5310 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5311 if (!(is_mm_relaxed (model
) || is_mm_seq_cst (model
)
5312 || is_mm_release (model
)))
5315 = expansion_point_location_if_in_system_header (input_location
);
5316 warning_at (loc
, OPT_Winvalid_memory_model
,
5317 "invalid memory model for %<__atomic_store%>");
5318 model
= MEMMODEL_SEQ_CST
;
5321 if (!flag_inline_atomics
)
5324 /* Expand the operands. */
5325 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5326 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5328 return expand_atomic_store (mem
, val
, model
, false);
5331 /* Expand the __atomic_fetch_XXX intrinsic:
5332 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5333 EXP is the CALL_EXPR.
5334 TARGET is an optional place for us to store the results.
5335 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5336 FETCH_AFTER is true if returning the result of the operation.
5337 FETCH_AFTER is false if returning the value before the operation.
5338 IGNORE is true if the result is not used.
5339 EXT_CALL is the correct builtin for an external call if this cannot be
5340 resolved to an instruction sequence. */
5343 expand_builtin_atomic_fetch_op (machine_mode mode
, tree exp
, rtx target
,
5344 enum rtx_code code
, bool fetch_after
,
5345 bool ignore
, enum built_in_function ext_call
)
5348 enum memmodel model
;
5352 model
= get_memmodel (CALL_EXPR_ARG (exp
, 2));
5354 /* Expand the operands. */
5355 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5356 val
= expand_expr_force_mode (CALL_EXPR_ARG (exp
, 1), mode
);
5358 /* Only try generating instructions if inlining is turned on. */
5359 if (flag_inline_atomics
)
5361 ret
= expand_atomic_fetch_op (target
, mem
, val
, code
, model
, fetch_after
);
5366 /* Return if a different routine isn't needed for the library call. */
5367 if (ext_call
== BUILT_IN_NONE
)
5370 /* Change the call to the specified function. */
5371 fndecl
= get_callee_fndecl (exp
);
5372 addr
= CALL_EXPR_FN (exp
);
5375 gcc_assert (TREE_OPERAND (addr
, 0) == fndecl
);
5376 TREE_OPERAND (addr
, 0) = builtin_decl_explicit (ext_call
);
5378 /* Expand the call here so we can emit trailing code. */
5379 ret
= expand_call (exp
, target
, ignore
);
5381 /* Replace the original function just in case it matters. */
5382 TREE_OPERAND (addr
, 0) = fndecl
;
5384 /* Then issue the arithmetic correction to return the right result. */
5389 ret
= expand_simple_binop (mode
, AND
, ret
, val
, NULL_RTX
, true,
5391 ret
= expand_simple_unop (mode
, NOT
, ret
, target
, true);
5394 ret
= expand_simple_binop (mode
, code
, ret
, val
, target
, true,
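/* Worked example of the correction above (illustration only): a library
   routine such as __atomic_fetch_add_4 returns the value *before* the
   operation, so when the user called the _fetch-after form the expansion
   re-applies the operation to the returned value:

     fetched = __atomic_fetch_add_4 (ptr, val, model);
     result  = fetched + val;

   and for NAND, which has no single RTX code, the AND/NOT pair above
   computes result = ~(fetched & val).  */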
5400 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5403 expand_ifn_atomic_bit_test_and (gcall
*call
)
5405 tree ptr
= gimple_call_arg (call
, 0);
5406 tree bit
= gimple_call_arg (call
, 1);
5407 tree flag
= gimple_call_arg (call
, 2);
5408 tree lhs
= gimple_call_lhs (call
);
5409 enum memmodel model
= MEMMODEL_SYNC_SEQ_CST
;
5410 machine_mode mode
= TYPE_MODE (TREE_TYPE (flag
));
5413 struct expand_operand ops
[5];
5415 gcc_assert (flag_inline_atomics
);
5417 if (gimple_call_num_args (call
) == 4)
5418 model
= get_memmodel (gimple_call_arg (call
, 3));
5420 rtx mem
= get_builtin_sync_mem (ptr
, mode
);
5421 rtx val
= expand_expr_force_mode (bit
, mode
);
5423 switch (gimple_call_internal_fn (call
))
5425 case IFN_ATOMIC_BIT_TEST_AND_SET
:
5427 optab
= atomic_bit_test_and_set_optab
;
5429 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT
:
5431 optab
= atomic_bit_test_and_complement_optab
;
5433 case IFN_ATOMIC_BIT_TEST_AND_RESET
:
5435 optab
= atomic_bit_test_and_reset_optab
;
5441 if (lhs
== NULL_TREE
)
5443 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
5444 val
, NULL_RTX
, true, OPTAB_DIRECT
);
5446 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
5447 expand_atomic_fetch_op (const0_rtx
, mem
, val
, code
, model
, false);
5451 rtx target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
5452 enum insn_code icode
= direct_optab_handler (optab
, mode
);
5453 gcc_assert (icode
!= CODE_FOR_nothing
);
5454 create_output_operand (&ops
[0], target
, mode
);
5455 create_fixed_operand (&ops
[1], mem
);
5456 create_convert_operand_to (&ops
[2], val
, mode
, true);
5457 create_integer_operand (&ops
[3], model
);
5458 create_integer_operand (&ops
[4], integer_onep (flag
));
5459 if (maybe_expand_insn (icode
, 5, ops
))
5463 val
= expand_simple_binop (mode
, ASHIFT
, const1_rtx
,
5464 val
, NULL_RTX
, true, OPTAB_DIRECT
);
5467 val
= expand_simple_unop (mode
, NOT
, val
, NULL_RTX
, true);
5468 rtx result
= expand_atomic_fetch_op (gen_reg_rtx (mode
), mem
, val
,
5469 code
, model
, false);
5470 if (integer_onep (flag
))
5472 result
= expand_simple_binop (mode
, ASHIFTRT
, result
, bitval
,
5473 NULL_RTX
, true, OPTAB_DIRECT
);
5474 result
= expand_simple_binop (mode
, AND
, result
, const1_rtx
, target
,
5475 true, OPTAB_DIRECT
);
5478 result
= expand_simple_binop (mode
, AND
, result
, maskval
, target
, true,
5480 if (result
!= target
)
5481 emit_move_insn (target
, result
);
5484 /* Expand an atomic clear operation.
5485 void _atomic_clear (BOOL *obj, enum memmodel)
5486 EXP is the call expression. */
5489 expand_builtin_atomic_clear (tree exp
)
5493 enum memmodel model
;
5495 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5496 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5497 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5499 if (is_mm_consume (model
) || is_mm_acquire (model
) || is_mm_acq_rel (model
))
5502 = expansion_point_location_if_in_system_header (input_location
);
5503 warning_at (loc
, OPT_Winvalid_memory_model
,
5504 "invalid memory model for %<__atomic_store%>");
5505 model
= MEMMODEL_SEQ_CST
;
5508 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5509 Failing that, a store is issued by __atomic_store. The only way this can
5510 fail is if the bool type is larger than a word size. Unlikely, but
5511 handle it anyway for completeness. Assume a single threaded model since
5512 there is no atomic support in this case, and no barriers are required. */
5513 ret
= expand_atomic_store (mem
, const0_rtx
, model
, true);
5515 emit_move_insn (mem
, const0_rtx
);
5519 /* Expand an atomic test_and_set operation.
5520 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5521 EXP is the call expression. */
5524 expand_builtin_atomic_test_and_set (tree exp
, rtx target
)
5527 enum memmodel model
;
5530 mode
= mode_for_size (BOOL_TYPE_SIZE
, MODE_INT
, 0);
5531 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5532 model
= get_memmodel (CALL_EXPR_ARG (exp
, 1));
5534 return expand_atomic_test_and_set (target
, mem
, model
);
5538 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5539 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5542 fold_builtin_atomic_always_lock_free (tree arg0
, tree arg1
)
5546 unsigned int mode_align
, type_align
;
5548 if (TREE_CODE (arg0
) != INTEGER_CST
)
5551 size
= INTVAL (expand_normal (arg0
)) * BITS_PER_UNIT
;
5552 mode
= mode_for_size (size
, MODE_INT
, 0);
5553 mode_align
= GET_MODE_ALIGNMENT (mode
);
5555 if (TREE_CODE (arg1
) == INTEGER_CST
)
5557 unsigned HOST_WIDE_INT val
= UINTVAL (expand_normal (arg1
));
5559 /* Either this argument is null, or it's a fake pointer encoding
5560 the alignment of the object. */
5561 val
= least_bit_hwi (val
);
5562 val
*= BITS_PER_UNIT
;
5564 if (val
== 0 || mode_align
< val
)
5565 type_align
= mode_align
;
5571 tree ttype
= TREE_TYPE (arg1
);
5573 /* This function is usually invoked and folded immediately by the front
5574 end before anything else has a chance to look at it. The pointer
5575 parameter at this point is usually cast to a void *, so check for that
5576 and look past the cast. */
5577 if (CONVERT_EXPR_P (arg1
)
5578 && POINTER_TYPE_P (ttype
)
5579 && VOID_TYPE_P (TREE_TYPE (ttype
))
5580 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
5581 arg1
= TREE_OPERAND (arg1
, 0);
5583 ttype
= TREE_TYPE (arg1
);
5584 gcc_assert (POINTER_TYPE_P (ttype
));
5586 /* Get the underlying type of the object. */
5587 ttype
= TREE_TYPE (ttype
);
5588 type_align
= TYPE_ALIGN (ttype
);
5591 /* If the object has smaller alignment, the lock free routines cannot
5593 if (type_align
< mode_align
)
5594 return boolean_false_node
;
5596 /* Check if a compare_and_swap pattern exists for the mode which represents
5597 the required size. The pattern is not allowed to fail, so the existence
5598 of the pattern indicates support is present. */
5599 if (can_compare_and_swap_p (mode
, true))
5600 return boolean_true_node
;
5602 return boolean_false_node
;
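/* Caller-side sketch (illustration only): this folding is what lets the
   front end turn the common idiom

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
		     "int must be lock-free");

   into a compile-time constant: a literal size, a null (or suitably
   aligned) object pointer, and an existing compare-and-swap pattern for
   the corresponding mode fold to boolean_true_node.  */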
5605 /* Return true if the parameters to call EXP represent an object which will
5606 always generate lock free instructions. The first argument represents the
5607 size of the object, and the second parameter is a pointer to the object
5608 itself. If NULL is passed for the object, then the result is based on
5609 typical alignment for an object of the specified size. Otherwise return
5613 expand_builtin_atomic_always_lock_free (tree exp
)
5616 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5617 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5619 if (TREE_CODE (arg0
) != INTEGER_CST
)
5621 error ("non-constant argument 1 to __atomic_always_lock_free");
5625 size
= fold_builtin_atomic_always_lock_free (arg0
, arg1
);
5626 if (size
== boolean_true_node
)
5631 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5632 is lock free on this architecture. */
5635 fold_builtin_atomic_is_lock_free (tree arg0
, tree arg1
)
5637 if (!flag_inline_atomics
)
5640 /* If it isn't always lock free, don't generate a result. */
5641 if (fold_builtin_atomic_always_lock_free (arg0
, arg1
) == boolean_true_node
)
5642 return boolean_true_node
;
5647 /* Return true if the parameters to call EXP represent an object which will
5648 always generate lock free instructions. The first argument represents the
5649 size of the object, and the second parameter is a pointer to the object
5650 itself. If NULL is passed for the object, then the result is based on
5651 typical alignment for an object of the specified size. Otherwise return
5655 expand_builtin_atomic_is_lock_free (tree exp
)
5658 tree arg0
= CALL_EXPR_ARG (exp
, 0);
5659 tree arg1
= CALL_EXPR_ARG (exp
, 1);
5661 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
5663 error ("non-integer argument 1 to __atomic_is_lock_free");
5667 if (!flag_inline_atomics
)
5670 /* If the value is known at compile time, return the RTX for it. */
5671 size
= fold_builtin_atomic_is_lock_free (arg0
, arg1
);
5672 if (size
== boolean_true_node
)
/* Expand the __atomic_thread_fence intrinsic:
   	void __atomic_thread_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_thread_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_thread_fence (model);
}

/* Expand the __atomic_signal_fence intrinsic:
   	void __atomic_signal_fence (enum memmodel)
   EXP is the CALL_EXPR.  */

static void
expand_builtin_atomic_signal_fence (tree exp)
{
  enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
  expand_mem_signal_fence (model);
}

/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
5709 expand_builtin_thread_pointer (tree exp
, rtx target
)
5711 enum insn_code icode
;
5712 if (!validate_arglist (exp
, VOID_TYPE
))
5714 icode
= direct_optab_handler (get_thread_pointer_optab
, Pmode
);
5715 if (icode
!= CODE_FOR_nothing
)
5717 struct expand_operand op
;
      /* If the target is not suitable then create a new target.  */
5719 if (target
== NULL_RTX
5721 || GET_MODE (target
) != Pmode
)
5722 target
= gen_reg_rtx (Pmode
);
5723 create_output_operand (&op
, target
, Pmode
);
5724 expand_insn (icode
, 1, &op
);
5727 error ("__builtin_thread_pointer is not supported on this target");
5732 expand_builtin_set_thread_pointer (tree exp
)
5734 enum insn_code icode
;
5735 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5737 icode
= direct_optab_handler (set_thread_pointer_optab
, Pmode
);
5738 if (icode
!= CODE_FOR_nothing
)
5740 struct expand_operand op
;
5741 rtx val
= expand_expr (CALL_EXPR_ARG (exp
, 0), NULL_RTX
,
5742 Pmode
, EXPAND_NORMAL
);
5743 create_input_operand (&op
, val
, Pmode
);
5744 expand_insn (icode
, 1, &op
);
5747 error ("__builtin_set_thread_pointer is not supported on this target");
/* Emit code to restore the current value of stack.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}

/* Emit code to save the current value of stack.  */

static rtx
expand_stack_save (void)
{
  rtx ret = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &ret);

  return ret;
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
  int flags;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When ASan is enabled, we don't want to expand some memory/string
     builtins and rely on libsanitizer's hooks.  This allows us to avoid
     redundant checks and be sure that possible overflow will be detected
     by ASan.  */

  if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
    return expand_call (exp, target, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && fcode != BUILT_IN_FORK
      && fcode != BUILT_IN_EXECL
      && fcode != BUILT_IN_EXECV
      && fcode != BUILT_IN_EXECLP
      && fcode != BUILT_IN_EXECLE
      && fcode != BUILT_IN_EXECVP
      && fcode != BUILT_IN_EXECVE
      && fcode != BUILT_IN_ALLOCA
      && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
      && fcode != BUILT_IN_FREE
      && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
      && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
      && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
      && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
      && fcode != BUILT_IN_CHKP_BNDRET)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
      && !(flags & ECF_LOOPING_CONST_OR_PURE))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }
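  /* Illustrative example of the shortcut above (assuming __builtin_ffs
     carries ECF_CONST, an assumption not spelled out here): given

	extern int x;
	(void) __builtin_ffs (x++);

     the ignored call is never expanded; only the increment of X is
     emitted, via the expand_expr loop just above.  */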
  /* expand_builtin_with_bounds is supposed to be used for
     instrumented builtin calls.  */
  gcc_assert (!CALL_WITH_BOUNDS_P (exp));

  switch (fcode)
    {
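      /* Illustrative note (not in the original sources): each case below
	 either expands the call inline, e.g.

	     double d = __builtin_fabs (x);

	 via a helper such as expand_builtin_fabs, or breaks out of the
	 switch so that the expand_call at the end of this function emits
	 an ordinary library call instead.  */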
    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      target = expand_builtin_fabs (exp, target, subtarget);
      if (target)
	return target;
      break;

    CASE_FLT_FN (BUILT_IN_COPYSIGN):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      target = expand_builtin_copysign (exp, target, subtarget);
      if (target)
	return target;
      break;

      /* Just do a normal library call if we were unable to fold
	 the values.  */
    CASE_FLT_FN (BUILT_IN_CABS):
      break;

    CASE_FLT_FN (BUILT_IN_FMA):
      target = expand_builtin_mathfn_ternary (exp, target, subtarget);
      if (target)
	return target;
      break;
5900 CASE_FLT_FN (BUILT_IN_ILOGB
):
5901 if (! flag_unsafe_math_optimizations
)
5904 CASE_FLT_FN (BUILT_IN_ISINF
):
5905 CASE_FLT_FN (BUILT_IN_FINITE
):
5906 case BUILT_IN_ISFINITE
:
5907 case BUILT_IN_ISNORMAL
:
5908 target
= expand_builtin_interclass_mathfn (exp
, target
);
5913 CASE_FLT_FN (BUILT_IN_ICEIL
):
5914 CASE_FLT_FN (BUILT_IN_LCEIL
):
5915 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5916 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5917 CASE_FLT_FN (BUILT_IN_IFLOOR
):
5918 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5919 target
= expand_builtin_int_roundingfn (exp
, target
);
5924 CASE_FLT_FN (BUILT_IN_IRINT
):
5925 CASE_FLT_FN (BUILT_IN_LRINT
):
5926 CASE_FLT_FN (BUILT_IN_LLRINT
):
5927 CASE_FLT_FN (BUILT_IN_IROUND
):
5928 CASE_FLT_FN (BUILT_IN_LROUND
):
5929 CASE_FLT_FN (BUILT_IN_LLROUND
):
5930 target
= expand_builtin_int_roundingfn_2 (exp
, target
);
5935 CASE_FLT_FN (BUILT_IN_POWI
):
5936 target
= expand_builtin_powi (exp
, target
);
5941 CASE_FLT_FN (BUILT_IN_CEXPI
):
5942 target
= expand_builtin_cexpi (exp
, target
);
5943 gcc_assert (target
);
5946 CASE_FLT_FN (BUILT_IN_SIN
):
5947 CASE_FLT_FN (BUILT_IN_COS
):
5948 if (! flag_unsafe_math_optimizations
)
5950 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5955 CASE_FLT_FN (BUILT_IN_SINCOS
):
5956 if (! flag_unsafe_math_optimizations
)
5958 target
= expand_builtin_sincos (exp
);
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;
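      /* Illustrative sketch of the three builtins handled above (user
	 code, not part of this file; the argument size 64 is a made-up
	 worst-case value and TARGET_FN is a hypothetical function being
	 forwarded to):

	     void *wrapper (void)
	     {
	       void *args = __builtin_apply_args ();
	       void *result = __builtin_apply ((void (*) ()) target_fn,
					       args, 64);
	       __builtin_return (result);
	     }
      */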
6001 case BUILT_IN_SAVEREGS
:
6002 return expand_builtin_saveregs ();
6004 case BUILT_IN_VA_ARG_PACK
:
6005 /* All valid uses of __builtin_va_arg_pack () are removed during
6007 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6010 case BUILT_IN_VA_ARG_PACK_LEN
:
6011 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6013 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6016 /* Return the address of the first anonymous stack arg. */
6017 case BUILT_IN_NEXT_ARG
:
6018 if (fold_builtin_next_arg (exp
, false))
6020 return expand_builtin_next_arg ();
6022 case BUILT_IN_CLEAR_CACHE
:
6023 target
= expand_builtin___clear_cache (exp
);
6028 case BUILT_IN_CLASSIFY_TYPE
:
6029 return expand_builtin_classify_type (exp
);
6031 case BUILT_IN_CONSTANT_P
:
6034 case BUILT_IN_FRAME_ADDRESS
:
6035 case BUILT_IN_RETURN_ADDRESS
:
6036 return expand_builtin_frame_address (fndecl
, exp
);
6038 /* Returns the address of the area where the structure is returned.
6040 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6041 if (call_expr_nargs (exp
) != 0
6042 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6043 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6046 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6048 case BUILT_IN_ALLOCA
:
6049 case BUILT_IN_ALLOCA_WITH_ALIGN
:
6050 /* If the allocation stems from the declaration of a variable-sized
6051 object, it cannot accumulate. */
6052 target
= expand_builtin_alloca (exp
, CALL_ALLOCA_FOR_VAR_P (exp
));
6057 case BUILT_IN_STACK_SAVE
:
6058 return expand_stack_save ();
6060 case BUILT_IN_STACK_RESTORE
:
6061 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6064 case BUILT_IN_BSWAP16
:
6065 case BUILT_IN_BSWAP32
:
6066 case BUILT_IN_BSWAP64
:
6067 target
= expand_builtin_bswap (target_mode
, exp
, target
, subtarget
);
6072 CASE_INT_FN (BUILT_IN_FFS
):
6073 target
= expand_builtin_unop (target_mode
, exp
, target
,
6074 subtarget
, ffs_optab
);
6079 CASE_INT_FN (BUILT_IN_CLZ
):
6080 target
= expand_builtin_unop (target_mode
, exp
, target
,
6081 subtarget
, clz_optab
);
6086 CASE_INT_FN (BUILT_IN_CTZ
):
6087 target
= expand_builtin_unop (target_mode
, exp
, target
,
6088 subtarget
, ctz_optab
);
6093 CASE_INT_FN (BUILT_IN_CLRSB
):
6094 target
= expand_builtin_unop (target_mode
, exp
, target
,
6095 subtarget
, clrsb_optab
);
6100 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6101 target
= expand_builtin_unop (target_mode
, exp
, target
,
6102 subtarget
, popcount_optab
);
6107 CASE_INT_FN (BUILT_IN_PARITY
):
6108 target
= expand_builtin_unop (target_mode
, exp
, target
,
6109 subtarget
, parity_optab
);
6114 case BUILT_IN_STRLEN
:
6115 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6120 case BUILT_IN_STRCPY
:
6121 target
= expand_builtin_strcpy (exp
, target
);
6126 case BUILT_IN_STRNCPY
:
6127 target
= expand_builtin_strncpy (exp
, target
);
6132 case BUILT_IN_STPCPY
:
6133 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6138 case BUILT_IN_MEMCPY
:
6139 target
= expand_builtin_memcpy (exp
, target
);
6144 case BUILT_IN_MEMPCPY
:
6145 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6150 case BUILT_IN_MEMSET
:
6151 target
= expand_builtin_memset (exp
, target
, mode
);
6156 case BUILT_IN_BZERO
:
6157 target
= expand_builtin_bzero (exp
);
6162 case BUILT_IN_STRCMP
:
6163 target
= expand_builtin_strcmp (exp
, target
);
6168 case BUILT_IN_STRNCMP
:
6169 target
= expand_builtin_strncmp (exp
, target
, mode
);
6175 case BUILT_IN_MEMCMP
:
6176 case BUILT_IN_MEMCMP_EQ
:
6177 target
= expand_builtin_memcmp (exp
, target
, fcode
== BUILT_IN_MEMCMP_EQ
);
6180 if (fcode
== BUILT_IN_MEMCMP_EQ
)
6182 tree newdecl
= builtin_decl_explicit (BUILT_IN_MEMCMP
);
6183 TREE_OPERAND (exp
, 1) = build_fold_addr_expr (newdecl
);
6187 case BUILT_IN_SETJMP
:
6188 /* This should have been lowered to the builtins below. */
6191 case BUILT_IN_SETJMP_SETUP
:
6192 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6193 and the receiver label. */
6194 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6196 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6197 VOIDmode
, EXPAND_NORMAL
);
6198 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6199 rtx_insn
*label_r
= label_rtx (label
);
6201 /* This is copied from the handling of non-local gotos. */
6202 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6203 nonlocal_goto_handler_labels
6204 = gen_rtx_INSN_LIST (VOIDmode
, label_r
,
6205 nonlocal_goto_handler_labels
);
6206 /* ??? Do not let expand_label treat us as such since we would
6207 not want to be both on the list of non-local labels and on
6208 the list of forced labels. */
6209 FORCED_LABEL (label
) = 0;
6214 case BUILT_IN_SETJMP_RECEIVER
:
6215 /* __builtin_setjmp_receiver is passed the receiver label. */
6216 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6218 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6219 rtx_insn
*label_r
= label_rtx (label
);
6221 expand_builtin_setjmp_receiver (label_r
);
6226 /* __builtin_longjmp is passed a pointer to an array of five words.
6227 It's similar to the C library longjmp function but works with
6228 __builtin_setjmp above. */
6229 case BUILT_IN_LONGJMP
:
6230 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6232 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6233 VOIDmode
, EXPAND_NORMAL
);
6234 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6236 if (value
!= const1_rtx
)
6238 error ("%<__builtin_longjmp%> second argument must be 1");
6242 expand_builtin_longjmp (buf_addr
, value
);
6247 case BUILT_IN_NONLOCAL_GOTO
:
6248 target
= expand_builtin_nonlocal_goto (exp
);
6253 /* This updates the setjmp buffer that is its argument with the value
6254 of the current stack pointer. */
6255 case BUILT_IN_UPDATE_SETJMP_BUF
:
6256 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6259 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6261 expand_builtin_update_setjmp_buf (buf_addr
);
6267 expand_builtin_trap ();
6270 case BUILT_IN_UNREACHABLE
:
6271 expand_builtin_unreachable ();
6274 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6275 case BUILT_IN_SIGNBITD32
:
6276 case BUILT_IN_SIGNBITD64
:
6277 case BUILT_IN_SIGNBITD128
:
6278 target
= expand_builtin_signbit (exp
, target
);
6283 /* Various hooks for the DWARF 2 __throw routine. */
6284 case BUILT_IN_UNWIND_INIT
:
6285 expand_builtin_unwind_init ();
6287 case BUILT_IN_DWARF_CFA
:
6288 return virtual_cfa_rtx
;
6289 #ifdef DWARF2_UNWIND_INFO
6290 case BUILT_IN_DWARF_SP_COLUMN
:
6291 return expand_builtin_dwarf_sp_column ();
6292 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6293 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6296 case BUILT_IN_FROB_RETURN_ADDR
:
6297 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6298 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6299 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6300 case BUILT_IN_EH_RETURN
:
6301 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6302 CALL_EXPR_ARG (exp
, 1));
6304 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6305 return expand_builtin_eh_return_data_regno (exp
);
6306 case BUILT_IN_EXTEND_POINTER
:
6307 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6308 case BUILT_IN_EH_POINTER
:
6309 return expand_builtin_eh_pointer (exp
);
6310 case BUILT_IN_EH_FILTER
:
6311 return expand_builtin_eh_filter (exp
);
6312 case BUILT_IN_EH_COPY_VALUES
:
6313 return expand_builtin_eh_copy_values (exp
);
6315 case BUILT_IN_VA_START
:
6316 return expand_builtin_va_start (exp
);
6317 case BUILT_IN_VA_END
:
6318 return expand_builtin_va_end (exp
);
6319 case BUILT_IN_VA_COPY
:
6320 return expand_builtin_va_copy (exp
);
6321 case BUILT_IN_EXPECT
:
6322 return expand_builtin_expect (exp
, target
);
6323 case BUILT_IN_ASSUME_ALIGNED
:
6324 return expand_builtin_assume_aligned (exp
, target
);
6325 case BUILT_IN_PREFETCH
:
6326 expand_builtin_prefetch (exp
);
6329 case BUILT_IN_INIT_TRAMPOLINE
:
6330 return expand_builtin_init_trampoline (exp
, true);
6331 case BUILT_IN_INIT_HEAP_TRAMPOLINE
:
6332 return expand_builtin_init_trampoline (exp
, false);
6333 case BUILT_IN_ADJUST_TRAMPOLINE
:
6334 return expand_builtin_adjust_trampoline (exp
);
6337 case BUILT_IN_EXECL
:
6338 case BUILT_IN_EXECV
:
6339 case BUILT_IN_EXECLP
:
6340 case BUILT_IN_EXECLE
:
6341 case BUILT_IN_EXECVP
:
6342 case BUILT_IN_EXECVE
:
6343 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6348 case BUILT_IN_SYNC_FETCH_AND_ADD_1
:
6349 case BUILT_IN_SYNC_FETCH_AND_ADD_2
:
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_4
:
6351 case BUILT_IN_SYNC_FETCH_AND_ADD_8
:
6352 case BUILT_IN_SYNC_FETCH_AND_ADD_16
:
6353 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_ADD_1
);
6354 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, false, target
);
6359 case BUILT_IN_SYNC_FETCH_AND_SUB_1
:
6360 case BUILT_IN_SYNC_FETCH_AND_SUB_2
:
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_4
:
6362 case BUILT_IN_SYNC_FETCH_AND_SUB_8
:
6363 case BUILT_IN_SYNC_FETCH_AND_SUB_16
:
6364 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_SUB_1
);
6365 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, false, target
);
6370 case BUILT_IN_SYNC_FETCH_AND_OR_1
:
6371 case BUILT_IN_SYNC_FETCH_AND_OR_2
:
6372 case BUILT_IN_SYNC_FETCH_AND_OR_4
:
6373 case BUILT_IN_SYNC_FETCH_AND_OR_8
:
6374 case BUILT_IN_SYNC_FETCH_AND_OR_16
:
6375 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_OR_1
);
6376 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, false, target
);
6381 case BUILT_IN_SYNC_FETCH_AND_AND_1
:
6382 case BUILT_IN_SYNC_FETCH_AND_AND_2
:
6383 case BUILT_IN_SYNC_FETCH_AND_AND_4
:
6384 case BUILT_IN_SYNC_FETCH_AND_AND_8
:
6385 case BUILT_IN_SYNC_FETCH_AND_AND_16
:
6386 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_AND_1
);
6387 target
= expand_builtin_sync_operation (mode
, exp
, AND
, false, target
);
6392 case BUILT_IN_SYNC_FETCH_AND_XOR_1
:
6393 case BUILT_IN_SYNC_FETCH_AND_XOR_2
:
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_4
:
6395 case BUILT_IN_SYNC_FETCH_AND_XOR_8
:
6396 case BUILT_IN_SYNC_FETCH_AND_XOR_16
:
6397 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_XOR_1
);
6398 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, false, target
);
6403 case BUILT_IN_SYNC_FETCH_AND_NAND_1
:
6404 case BUILT_IN_SYNC_FETCH_AND_NAND_2
:
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_4
:
6406 case BUILT_IN_SYNC_FETCH_AND_NAND_8
:
6407 case BUILT_IN_SYNC_FETCH_AND_NAND_16
:
6408 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_FETCH_AND_NAND_1
);
6409 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, false, target
);
6414 case BUILT_IN_SYNC_ADD_AND_FETCH_1
:
6415 case BUILT_IN_SYNC_ADD_AND_FETCH_2
:
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_4
:
6417 case BUILT_IN_SYNC_ADD_AND_FETCH_8
:
6418 case BUILT_IN_SYNC_ADD_AND_FETCH_16
:
6419 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_ADD_AND_FETCH_1
);
6420 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
, true, target
);
6425 case BUILT_IN_SYNC_SUB_AND_FETCH_1
:
6426 case BUILT_IN_SYNC_SUB_AND_FETCH_2
:
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_4
:
6428 case BUILT_IN_SYNC_SUB_AND_FETCH_8
:
6429 case BUILT_IN_SYNC_SUB_AND_FETCH_16
:
6430 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_SUB_AND_FETCH_1
);
6431 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
, true, target
);
6436 case BUILT_IN_SYNC_OR_AND_FETCH_1
:
6437 case BUILT_IN_SYNC_OR_AND_FETCH_2
:
6438 case BUILT_IN_SYNC_OR_AND_FETCH_4
:
6439 case BUILT_IN_SYNC_OR_AND_FETCH_8
:
6440 case BUILT_IN_SYNC_OR_AND_FETCH_16
:
6441 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_OR_AND_FETCH_1
);
6442 target
= expand_builtin_sync_operation (mode
, exp
, IOR
, true, target
);
6447 case BUILT_IN_SYNC_AND_AND_FETCH_1
:
6448 case BUILT_IN_SYNC_AND_AND_FETCH_2
:
6449 case BUILT_IN_SYNC_AND_AND_FETCH_4
:
6450 case BUILT_IN_SYNC_AND_AND_FETCH_8
:
6451 case BUILT_IN_SYNC_AND_AND_FETCH_16
:
6452 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_AND_AND_FETCH_1
);
6453 target
= expand_builtin_sync_operation (mode
, exp
, AND
, true, target
);
6458 case BUILT_IN_SYNC_XOR_AND_FETCH_1
:
6459 case BUILT_IN_SYNC_XOR_AND_FETCH_2
:
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_4
:
6461 case BUILT_IN_SYNC_XOR_AND_FETCH_8
:
6462 case BUILT_IN_SYNC_XOR_AND_FETCH_16
:
6463 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_XOR_AND_FETCH_1
);
6464 target
= expand_builtin_sync_operation (mode
, exp
, XOR
, true, target
);
6469 case BUILT_IN_SYNC_NAND_AND_FETCH_1
:
6470 case BUILT_IN_SYNC_NAND_AND_FETCH_2
:
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_4
:
6472 case BUILT_IN_SYNC_NAND_AND_FETCH_8
:
6473 case BUILT_IN_SYNC_NAND_AND_FETCH_16
:
6474 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_NAND_AND_FETCH_1
);
6475 target
= expand_builtin_sync_operation (mode
, exp
, NOT
, true, target
);
6480 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
:
6481 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2
:
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4
:
6483 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8
:
6484 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16
:
6485 if (mode
== VOIDmode
)
6486 mode
= TYPE_MODE (boolean_type_node
);
6487 if (!target
|| !register_operand (target
, mode
))
6488 target
= gen_reg_rtx (mode
);
6490 mode
= get_builtin_sync_mode
6491 (fcode
- BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1
);
6492 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6497 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
:
6498 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2
:
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4
:
6500 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8
:
6501 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16
:
6502 mode
= get_builtin_sync_mode
6503 (fcode
- BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1
);
6504 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6509 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
:
6510 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2
:
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4
:
6512 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8
:
6513 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16
:
6514 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_TEST_AND_SET_1
);
6515 target
= expand_builtin_sync_lock_test_and_set (mode
, exp
, target
);
6520 case BUILT_IN_SYNC_LOCK_RELEASE_1
:
6521 case BUILT_IN_SYNC_LOCK_RELEASE_2
:
6522 case BUILT_IN_SYNC_LOCK_RELEASE_4
:
6523 case BUILT_IN_SYNC_LOCK_RELEASE_8
:
6524 case BUILT_IN_SYNC_LOCK_RELEASE_16
:
6525 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SYNC_LOCK_RELEASE_1
);
6526 expand_builtin_sync_lock_release (mode
, exp
);
6529 case BUILT_IN_SYNC_SYNCHRONIZE
:
6530 expand_builtin_sync_synchronize ();
6533 case BUILT_IN_ATOMIC_EXCHANGE_1
:
6534 case BUILT_IN_ATOMIC_EXCHANGE_2
:
6535 case BUILT_IN_ATOMIC_EXCHANGE_4
:
6536 case BUILT_IN_ATOMIC_EXCHANGE_8
:
6537 case BUILT_IN_ATOMIC_EXCHANGE_16
:
6538 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_EXCHANGE_1
);
6539 target
= expand_builtin_atomic_exchange (mode
, exp
, target
);
6544 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
6545 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
6547 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
6548 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
6550 unsigned int nargs
, z
;
6551 vec
<tree
, va_gc
> *vec
;
6554 get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
);
6555 target
= expand_builtin_atomic_compare_exchange (mode
, exp
, target
);
6559 /* If this is turned into an external library call, the weak parameter
6560 must be dropped to match the expected parameter list. */
6561 nargs
= call_expr_nargs (exp
);
6562 vec_alloc (vec
, nargs
- 1);
6563 for (z
= 0; z
< 3; z
++)
6564 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6565 /* Skip the boolean weak parameter. */
6566 for (z
= 4; z
< 6; z
++)
6567 vec
->quick_push (CALL_EXPR_ARG (exp
, z
));
6568 exp
= build_call_vec (TREE_TYPE (exp
), CALL_EXPR_FN (exp
), vec
);
6572 case BUILT_IN_ATOMIC_LOAD_1
:
6573 case BUILT_IN_ATOMIC_LOAD_2
:
6574 case BUILT_IN_ATOMIC_LOAD_4
:
6575 case BUILT_IN_ATOMIC_LOAD_8
:
6576 case BUILT_IN_ATOMIC_LOAD_16
:
6577 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_LOAD_1
);
6578 target
= expand_builtin_atomic_load (mode
, exp
, target
);
6583 case BUILT_IN_ATOMIC_STORE_1
:
6584 case BUILT_IN_ATOMIC_STORE_2
:
6585 case BUILT_IN_ATOMIC_STORE_4
:
6586 case BUILT_IN_ATOMIC_STORE_8
:
6587 case BUILT_IN_ATOMIC_STORE_16
:
6588 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_STORE_1
);
6589 target
= expand_builtin_atomic_store (mode
, exp
);
6594 case BUILT_IN_ATOMIC_ADD_FETCH_1
:
6595 case BUILT_IN_ATOMIC_ADD_FETCH_2
:
6596 case BUILT_IN_ATOMIC_ADD_FETCH_4
:
6597 case BUILT_IN_ATOMIC_ADD_FETCH_8
:
6598 case BUILT_IN_ATOMIC_ADD_FETCH_16
:
6600 enum built_in_function lib
;
6601 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
);
6602 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_ADD_1
+
6603 (fcode
- BUILT_IN_ATOMIC_ADD_FETCH_1
));
6604 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, true,
6610 case BUILT_IN_ATOMIC_SUB_FETCH_1
:
6611 case BUILT_IN_ATOMIC_SUB_FETCH_2
:
6612 case BUILT_IN_ATOMIC_SUB_FETCH_4
:
6613 case BUILT_IN_ATOMIC_SUB_FETCH_8
:
6614 case BUILT_IN_ATOMIC_SUB_FETCH_16
:
6616 enum built_in_function lib
;
6617 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
);
6618 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_SUB_1
+
6619 (fcode
- BUILT_IN_ATOMIC_SUB_FETCH_1
));
6620 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, true,
6626 case BUILT_IN_ATOMIC_AND_FETCH_1
:
6627 case BUILT_IN_ATOMIC_AND_FETCH_2
:
6628 case BUILT_IN_ATOMIC_AND_FETCH_4
:
6629 case BUILT_IN_ATOMIC_AND_FETCH_8
:
6630 case BUILT_IN_ATOMIC_AND_FETCH_16
:
6632 enum built_in_function lib
;
6633 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
);
6634 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_AND_1
+
6635 (fcode
- BUILT_IN_ATOMIC_AND_FETCH_1
));
6636 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, true,
6642 case BUILT_IN_ATOMIC_NAND_FETCH_1
:
6643 case BUILT_IN_ATOMIC_NAND_FETCH_2
:
6644 case BUILT_IN_ATOMIC_NAND_FETCH_4
:
6645 case BUILT_IN_ATOMIC_NAND_FETCH_8
:
6646 case BUILT_IN_ATOMIC_NAND_FETCH_16
:
6648 enum built_in_function lib
;
6649 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
);
6650 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_NAND_1
+
6651 (fcode
- BUILT_IN_ATOMIC_NAND_FETCH_1
));
6652 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, true,
6658 case BUILT_IN_ATOMIC_XOR_FETCH_1
:
6659 case BUILT_IN_ATOMIC_XOR_FETCH_2
:
6660 case BUILT_IN_ATOMIC_XOR_FETCH_4
:
6661 case BUILT_IN_ATOMIC_XOR_FETCH_8
:
6662 case BUILT_IN_ATOMIC_XOR_FETCH_16
:
6664 enum built_in_function lib
;
6665 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
);
6666 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_XOR_1
+
6667 (fcode
- BUILT_IN_ATOMIC_XOR_FETCH_1
));
6668 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, true,
6674 case BUILT_IN_ATOMIC_OR_FETCH_1
:
6675 case BUILT_IN_ATOMIC_OR_FETCH_2
:
6676 case BUILT_IN_ATOMIC_OR_FETCH_4
:
6677 case BUILT_IN_ATOMIC_OR_FETCH_8
:
6678 case BUILT_IN_ATOMIC_OR_FETCH_16
:
6680 enum built_in_function lib
;
6681 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
);
6682 lib
= (enum built_in_function
)((int)BUILT_IN_ATOMIC_FETCH_OR_1
+
6683 (fcode
- BUILT_IN_ATOMIC_OR_FETCH_1
));
6684 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, true,
6690 case BUILT_IN_ATOMIC_FETCH_ADD_1
:
6691 case BUILT_IN_ATOMIC_FETCH_ADD_2
:
6692 case BUILT_IN_ATOMIC_FETCH_ADD_4
:
6693 case BUILT_IN_ATOMIC_FETCH_ADD_8
:
6694 case BUILT_IN_ATOMIC_FETCH_ADD_16
:
6695 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_ADD_1
);
6696 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, PLUS
, false,
6697 ignore
, BUILT_IN_NONE
);
6702 case BUILT_IN_ATOMIC_FETCH_SUB_1
:
6703 case BUILT_IN_ATOMIC_FETCH_SUB_2
:
6704 case BUILT_IN_ATOMIC_FETCH_SUB_4
:
6705 case BUILT_IN_ATOMIC_FETCH_SUB_8
:
6706 case BUILT_IN_ATOMIC_FETCH_SUB_16
:
6707 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_SUB_1
);
6708 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, MINUS
, false,
6709 ignore
, BUILT_IN_NONE
);
6714 case BUILT_IN_ATOMIC_FETCH_AND_1
:
6715 case BUILT_IN_ATOMIC_FETCH_AND_2
:
6716 case BUILT_IN_ATOMIC_FETCH_AND_4
:
6717 case BUILT_IN_ATOMIC_FETCH_AND_8
:
6718 case BUILT_IN_ATOMIC_FETCH_AND_16
:
6719 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_AND_1
);
6720 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, AND
, false,
6721 ignore
, BUILT_IN_NONE
);
6726 case BUILT_IN_ATOMIC_FETCH_NAND_1
:
6727 case BUILT_IN_ATOMIC_FETCH_NAND_2
:
6728 case BUILT_IN_ATOMIC_FETCH_NAND_4
:
6729 case BUILT_IN_ATOMIC_FETCH_NAND_8
:
6730 case BUILT_IN_ATOMIC_FETCH_NAND_16
:
6731 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_NAND_1
);
6732 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, NOT
, false,
6733 ignore
, BUILT_IN_NONE
);
6738 case BUILT_IN_ATOMIC_FETCH_XOR_1
:
6739 case BUILT_IN_ATOMIC_FETCH_XOR_2
:
6740 case BUILT_IN_ATOMIC_FETCH_XOR_4
:
6741 case BUILT_IN_ATOMIC_FETCH_XOR_8
:
6742 case BUILT_IN_ATOMIC_FETCH_XOR_16
:
6743 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_XOR_1
);
6744 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, XOR
, false,
6745 ignore
, BUILT_IN_NONE
);
6750 case BUILT_IN_ATOMIC_FETCH_OR_1
:
6751 case BUILT_IN_ATOMIC_FETCH_OR_2
:
6752 case BUILT_IN_ATOMIC_FETCH_OR_4
:
6753 case BUILT_IN_ATOMIC_FETCH_OR_8
:
6754 case BUILT_IN_ATOMIC_FETCH_OR_16
:
6755 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ATOMIC_FETCH_OR_1
);
6756 target
= expand_builtin_atomic_fetch_op (mode
, exp
, target
, IOR
, false,
6757 ignore
, BUILT_IN_NONE
);
6762 case BUILT_IN_ATOMIC_TEST_AND_SET
:
6763 return expand_builtin_atomic_test_and_set (exp
, target
);
6765 case BUILT_IN_ATOMIC_CLEAR
:
6766 return expand_builtin_atomic_clear (exp
);
6768 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
6769 return expand_builtin_atomic_always_lock_free (exp
);
6771 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
6772 target
= expand_builtin_atomic_is_lock_free (exp
);
6777 case BUILT_IN_ATOMIC_THREAD_FENCE
:
6778 expand_builtin_atomic_thread_fence (exp
);
6781 case BUILT_IN_ATOMIC_SIGNAL_FENCE
:
6782 expand_builtin_atomic_signal_fence (exp
);
6785 case BUILT_IN_OBJECT_SIZE
:
6786 return expand_builtin_object_size (exp
);
6788 case BUILT_IN_MEMCPY_CHK
:
6789 case BUILT_IN_MEMPCPY_CHK
:
6790 case BUILT_IN_MEMMOVE_CHK
:
6791 case BUILT_IN_MEMSET_CHK
:
6792 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6797 case BUILT_IN_STRCPY_CHK
:
6798 case BUILT_IN_STPCPY_CHK
:
6799 case BUILT_IN_STRNCPY_CHK
:
6800 case BUILT_IN_STPNCPY_CHK
:
6801 case BUILT_IN_STRCAT_CHK
:
6802 case BUILT_IN_STRNCAT_CHK
:
6803 case BUILT_IN_SNPRINTF_CHK
:
6804 case BUILT_IN_VSNPRINTF_CHK
:
6805 maybe_emit_chk_warning (exp
, fcode
);
6808 case BUILT_IN_SPRINTF_CHK
:
6809 case BUILT_IN_VSPRINTF_CHK
:
6810 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6814 if (warn_free_nonheap_object
)
6815 maybe_emit_free_warning (exp
);
6818 case BUILT_IN_THREAD_POINTER
:
6819 return expand_builtin_thread_pointer (exp
, target
);
6821 case BUILT_IN_SET_THREAD_POINTER
:
6822 expand_builtin_set_thread_pointer (exp
);
6825 case BUILT_IN_CILK_DETACH
:
6826 expand_builtin_cilk_detach (exp
);
6829 case BUILT_IN_CILK_POP_FRAME
:
6830 expand_builtin_cilk_pop_frame (exp
);
6833 case BUILT_IN_CHKP_INIT_PTR_BOUNDS
:
6834 case BUILT_IN_CHKP_NULL_PTR_BOUNDS
:
6835 case BUILT_IN_CHKP_COPY_PTR_BOUNDS
:
6836 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
:
6837 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
:
6838 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS
:
6839 case BUILT_IN_CHKP_SET_PTR_BOUNDS
:
6840 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS
:
6841 case BUILT_IN_CHKP_STORE_PTR_BOUNDS
:
6842 case BUILT_IN_CHKP_GET_PTR_LBOUND
:
6843 case BUILT_IN_CHKP_GET_PTR_UBOUND
:
6844 /* We allow user CHKP builtins if Pointer Bounds
6846 if (!chkp_function_instrumented_p (current_function_decl
))
6848 if (fcode
== BUILT_IN_CHKP_SET_PTR_BOUNDS
6849 || fcode
== BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6850 || fcode
== BUILT_IN_CHKP_INIT_PTR_BOUNDS
6851 || fcode
== BUILT_IN_CHKP_NULL_PTR_BOUNDS
6852 || fcode
== BUILT_IN_CHKP_COPY_PTR_BOUNDS
)
6853 return expand_normal (CALL_EXPR_ARG (exp
, 0));
6854 else if (fcode
== BUILT_IN_CHKP_GET_PTR_LBOUND
)
6855 return expand_normal (size_zero_node
);
6856 else if (fcode
== BUILT_IN_CHKP_GET_PTR_UBOUND
)
6857 return expand_normal (size_int (-1));
6863 case BUILT_IN_CHKP_BNDMK
:
6864 case BUILT_IN_CHKP_BNDSTX
:
6865 case BUILT_IN_CHKP_BNDCL
:
6866 case BUILT_IN_CHKP_BNDCU
:
6867 case BUILT_IN_CHKP_BNDLDX
:
6868 case BUILT_IN_CHKP_BNDRET
:
6869 case BUILT_IN_CHKP_INTERSECT
:
6870 case BUILT_IN_CHKP_NARROW
:
6871 case BUILT_IN_CHKP_EXTRACT_LOWER
:
6872 case BUILT_IN_CHKP_EXTRACT_UPPER
:
6873 /* Software implementation of Pointer Bounds Checker is NYI.
6874 Target support is required. */
6875 error ("Your target platform does not support -fcheck-pointer-bounds");
6878 case BUILT_IN_ACC_ON_DEVICE
:
6879 /* Do library call, if we failed to expand the builtin when
6883 default: /* just do library call, if unknown builtin */
6887 /* The switch statement above can drop through to cause the function
6888 to be called normally. */
6889 return expand_call (exp
, target
, ignore
);
/* Similar to expand_builtin but is used for instrumented calls.  */

static rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
6939 /* Determine whether a tree node represents a call to a built-in
6940 function. If the tree T is a call to a built-in function with
6941 the right number of arguments of the appropriate types, return
6942 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6943 Otherwise the return value is END_BUILTINS. */
6945 enum built_in_function
6946 builtin_mathfn_code (const_tree t
)
6948 const_tree fndecl
, arg
, parmlist
;
6949 const_tree argtype
, parmtype
;
6950 const_call_expr_arg_iterator iter
;
6952 if (TREE_CODE (t
) != CALL_EXPR
6953 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6954 return END_BUILTINS
;
6956 fndecl
= get_callee_fndecl (t
);
6957 if (fndecl
== NULL_TREE
6958 || TREE_CODE (fndecl
) != FUNCTION_DECL
6959 || ! DECL_BUILT_IN (fndecl
)
6960 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6961 return END_BUILTINS
;
6963 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6964 init_const_call_expr_arg_iterator (t
, &iter
);
6965 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6967 /* If a function doesn't take a variable number of arguments,
6968 the last element in the list will have type `void'. */
6969 parmtype
= TREE_VALUE (parmlist
);
6970 if (VOID_TYPE_P (parmtype
))
6972 if (more_const_call_expr_args_p (&iter
))
6973 return END_BUILTINS
;
6974 return DECL_FUNCTION_CODE (fndecl
);
6977 if (! more_const_call_expr_args_p (&iter
))
6978 return END_BUILTINS
;
6980 arg
= next_const_call_expr_arg (&iter
);
6981 argtype
= TREE_TYPE (arg
);
6983 if (SCALAR_FLOAT_TYPE_P (parmtype
))
6985 if (! SCALAR_FLOAT_TYPE_P (argtype
))
6986 return END_BUILTINS
;
6988 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
6990 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
6991 return END_BUILTINS
;
6993 else if (POINTER_TYPE_P (parmtype
))
6995 if (! POINTER_TYPE_P (argtype
))
6996 return END_BUILTINS
;
6998 else if (INTEGRAL_TYPE_P (parmtype
))
7000 if (! INTEGRAL_TYPE_P (argtype
))
7001 return END_BUILTINS
;
7004 return END_BUILTINS
;
7007 /* Variable-length argument list. */
7008 return DECL_FUNCTION_CODE (fndecl
);
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer
      || force_folding_builtin_constant_p)
    return integer_zero_node;

  return NULL_TREE;
}
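/* Illustrative examples for the folder above (not from the GCC sources):
   __builtin_constant_p (42) and __builtin_constant_p ("abc") fold to 1
   via the CONSTANT_CLASS_P and ADDR_EXPR-of-STRING_CST checks, while
   __builtin_constant_p (p) for a pointer parameter P folds to 0 once no
   further optimization will run (e.g. when folding an initializer);
   otherwise the answer is deferred by returning NULL_TREE.  */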
7055 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7056 return it as a truthvalue. */
7059 build_builtin_expect_predicate (location_t loc
, tree pred
, tree expected
,
7062 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7064 fn
= builtin_decl_explicit (BUILT_IN_EXPECT
);
7065 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7066 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7067 pred_type
= TREE_VALUE (arg_types
);
7068 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7070 pred
= fold_convert_loc (loc
, pred_type
, pred
);
7071 expected
= fold_convert_loc (loc
, expected_type
, expected
);
7072 call_expr
= build_call_expr_loc (loc
, fn
, predictor
? 3 : 2, pred
, expected
,
7075 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7076 build_int_cst (ret_type
, 0));
7079 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7080 NULL_TREE if no simplification is possible. */
7083 fold_builtin_expect (location_t loc
, tree arg0
, tree arg1
, tree arg2
)
7085 tree inner
, fndecl
, inner_arg0
;
7086 enum tree_code code
;
7088 /* Distribute the expected value over short-circuiting operators.
7089 See through the cast from truthvalue_type_node to long. */
7091 while (CONVERT_EXPR_P (inner_arg0
)
7092 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0
))
7093 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0
, 0))))
7094 inner_arg0
= TREE_OPERAND (inner_arg0
, 0);
  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
7101 if (COMPARISON_CLASS_P (inner
)
7102 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7103 inner
= TREE_OPERAND (inner
, 0);
7105 if (TREE_CODE (inner
) == CALL_EXPR
7106 && (fndecl
= get_callee_fndecl (inner
))
7107 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7108 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7112 code
= TREE_CODE (inner
);
7113 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7115 tree op0
= TREE_OPERAND (inner
, 0);
7116 tree op1
= TREE_OPERAND (inner
, 1);
7118 op0
= build_builtin_expect_predicate (loc
, op0
, arg1
, arg2
);
7119 op1
= build_builtin_expect_predicate (loc
, op1
, arg1
, arg2
);
7120 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7122 return fold_convert_loc (loc
, TREE_TYPE (arg0
), inner
);
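  /* Illustrative example of the distribution above (not from the GCC
     sources): __builtin_expect (a && b, 1) is rewritten roughly as

	(__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

     by applying build_builtin_expect_predicate to each operand of the
     TRUTH_ANDIF_EXPR and rebuilding the short-circuit operator.  */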
7125 /* If the argument isn't invariant then there's nothing else we can do. */
7126 if (!TREE_CONSTANT (inner_arg0
))
7129 /* If we expect that a comparison against the argument will fold to
7130 a constant return the constant. In practice, this means a true
7131 constant or the address of a non-weak symbol. */
7134 if (TREE_CODE (inner
) == ADDR_EXPR
)
7138 inner
= TREE_OPERAND (inner
, 0);
7140 while (TREE_CODE (inner
) == COMPONENT_REF
7141 || TREE_CODE (inner
) == ARRAY_REF
);
7142 if (VAR_OR_FUNCTION_DECL_P (inner
) && DECL_WEAK (inner
))
7146 /* Otherwise, ARG0 already has the proper type for the return value. */
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (integer_type_node, no_type_class);

  return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
}

/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (location_t loc, tree type, tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
	return fold_convert_loc (loc, type, len);

      return NULL_TREE;
    }
}

/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (location_t loc, tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn (loc, 0, "target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      call = builtin_save_expr (call);
    }

  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
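/* Illustrative view of the canonicalization above (not from the GCC
   sources): a call sincos (x, &s, &c) is folded to the equivalent of

	t = cexpi (x), s = __imag__ t, c = __real__ t

   where cexpi is GCC's internal builtin, so later passes only have to
   deal with a single call.  */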
7247 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7248 arguments to the call, and TYPE is its return type.
7249 Return NULL_TREE if no simplification can be made. */
7252 fold_builtin_memchr (location_t loc
, tree arg1
, tree arg2
, tree len
, tree type
)
7254 if (!validate_arg (arg1
, POINTER_TYPE
)
7255 || !validate_arg (arg2
, INTEGER_TYPE
)
7256 || !validate_arg (len
, INTEGER_TYPE
))
7262 if (TREE_CODE (arg2
) != INTEGER_CST
7263 || !tree_fits_uhwi_p (len
))
7266 p1
= c_getstr (arg1
);
7267 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
7273 if (target_char_cast (arg2
, &c
))
7276 r
= (const char *) memchr (p1
, c
, tree_to_uhwi (len
));
7279 return build_int_cst (TREE_TYPE (arg1
), 0);
7281 tem
= fold_build_pointer_plus_hwi_loc (loc
, arg1
, r
- p1
);
7282 return fold_convert_loc (loc
, type
, tem
);
7288 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7289 Return NULL_TREE if no simplification can be made. */
7292 fold_builtin_memcmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
7294 if (!validate_arg (arg1
, POINTER_TYPE
)
7295 || !validate_arg (arg2
, POINTER_TYPE
)
7296 || !validate_arg (len
, INTEGER_TYPE
))
7299 /* If the LEN parameter is zero, return zero. */
7300 if (integer_zerop (len
))
7301 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
7304 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7305 if (operand_equal_p (arg1
, arg2
, 0))
7306 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
7308 /* If len parameter is one, return an expression corresponding to
7309 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7310 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
7312 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7313 tree cst_uchar_ptr_node
7314 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7317 = fold_convert_loc (loc
, integer_type_node
,
7318 build1 (INDIRECT_REF
, cst_uchar_node
,
7319 fold_convert_loc (loc
,
7323 = fold_convert_loc (loc
, integer_type_node
,
7324 build1 (INDIRECT_REF
, cst_uchar_node
,
7325 fold_convert_loc (loc
,
7328 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
7334 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7335 Return NULL_TREE if no simplification can be made. */
7338 fold_builtin_strcmp (location_t loc
, tree arg1
, tree arg2
)
7340 if (!validate_arg (arg1
, POINTER_TYPE
)
7341 || !validate_arg (arg2
, POINTER_TYPE
))
7344 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7345 if (operand_equal_p (arg1
, arg2
, 0))
7346 return integer_zero_node
;
7348 /* If the second arg is "", return *(const unsigned char*)arg1. */
7349 const char *p2
= c_getstr (arg2
);
7350 if (p2
&& *p2
== '\0')
7352 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7353 tree cst_uchar_ptr_node
7354 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7356 return fold_convert_loc (loc
, integer_type_node
,
7357 build1 (INDIRECT_REF
, cst_uchar_node
,
7358 fold_convert_loc (loc
,
7363 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7364 const char *p1
= c_getstr (arg1
);
7365 if (p1
&& *p1
== '\0')
7367 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7368 tree cst_uchar_ptr_node
7369 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7372 = fold_convert_loc (loc
, integer_type_node
,
7373 build1 (INDIRECT_REF
, cst_uchar_node
,
7374 fold_convert_loc (loc
,
7377 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
7383 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7384 Return NULL_TREE if no simplification can be made. */
7387 fold_builtin_strncmp (location_t loc
, tree arg1
, tree arg2
, tree len
)
7389 if (!validate_arg (arg1
, POINTER_TYPE
)
7390 || !validate_arg (arg2
, POINTER_TYPE
)
7391 || !validate_arg (len
, INTEGER_TYPE
))
7394 /* If the LEN parameter is zero, return zero. */
7395 if (integer_zerop (len
))
7396 return omit_two_operands_loc (loc
, integer_type_node
, integer_zero_node
,
7399 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7400 if (operand_equal_p (arg1
, arg2
, 0))
7401 return omit_one_operand_loc (loc
, integer_type_node
, integer_zero_node
, len
);
7403 /* If the second arg is "", and the length is greater than zero,
7404 return *(const unsigned char*)arg1. */
7405 const char *p2
= c_getstr (arg2
);
7406 if (p2
&& *p2
== '\0'
7407 && TREE_CODE (len
) == INTEGER_CST
7408 && tree_int_cst_sgn (len
) == 1)
7410 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7411 tree cst_uchar_ptr_node
7412 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7414 return fold_convert_loc (loc
, integer_type_node
,
7415 build1 (INDIRECT_REF
, cst_uchar_node
,
7416 fold_convert_loc (loc
,
7421 /* If the first arg is "", and the length is greater than zero,
7422 return -*(const unsigned char*)arg2. */
7423 const char *p1
= c_getstr (arg1
);
7424 if (p1
&& *p1
== '\0'
7425 && TREE_CODE (len
) == INTEGER_CST
7426 && tree_int_cst_sgn (len
) == 1)
7428 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7429 tree cst_uchar_ptr_node
7430 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7432 tree temp
= fold_convert_loc (loc
, integer_type_node
,
7433 build1 (INDIRECT_REF
, cst_uchar_node
,
7434 fold_convert_loc (loc
,
7437 return fold_build1_loc (loc
, NEGATE_EXPR
, integer_type_node
, temp
);
7440 /* If len parameter is one, return an expression corresponding to
7441 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7442 if (tree_fits_uhwi_p (len
) && tree_to_uhwi (len
) == 1)
7444 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
7445 tree cst_uchar_ptr_node
7446 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
7448 tree ind1
= fold_convert_loc (loc
, integer_type_node
,
7449 build1 (INDIRECT_REF
, cst_uchar_node
,
7450 fold_convert_loc (loc
,
7453 tree ind2
= fold_convert_loc (loc
, integer_type_node
,
7454 build1 (INDIRECT_REF
, cst_uchar_node
,
7455 fold_convert_loc (loc
,
7458 return fold_build2_loc (loc
, MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
			 build_int_cst (integer_type_node,
					~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
			      arg, integer_zero_node);
    }
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
			  build_int_cst (integer_type_node, 0x7f));
}

/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (location_t loc, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert_loc (loc, unsigned_type_node, arg);
      arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
			 build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
			      build_int_cst (unsigned_type_node, 9));
    }
}
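/* Worked instance of the isdigit transform above (illustrative only):
   with the usual execution character set where '0' == 48, isdigit ('7')
   becomes (unsigned) 55 - 48 = 7 <= 9, i.e. 1, while isdigit ('a')
   becomes (unsigned) 97 - 48 = 49 <= 9, i.e. 0.  */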
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert_loc (loc, type, arg);
  return fold_build1_loc (loc, ABS_EXPR, type, arg);
}

/* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */

static tree
fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
{
  /* ??? Only expand to FMA_EXPR if it's directly supported.  */
  if (validate_arg (arg0, REAL_TYPE)
      && validate_arg (arg1, REAL_TYPE)
      && validate_arg (arg2, REAL_TYPE)
      && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
    return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (location_t loc, tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
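/* Illustrative instance of the fold above (not from the GCC sources):
   for z = a + b*i, carg (z) becomes atan2 (b, a); e.g. carg of the
   purely imaginary value 0.0 + 1.0i turns into atan2 (1.0, 0.0),
   which is pi/2.  */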
7582 /* Fold a call to builtin frexp, we can assume the base is 2. */
7585 fold_builtin_frexp (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
7587 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
7592 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
7595 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
7597 /* Proceed if a valid pointer type was passed in. */
7598 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
7600 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
7606 /* For +-0, return (*exp = 0, +-0). */
7607 exp
= integer_zero_node
;
7612 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7613 return omit_one_operand_loc (loc
, rettype
, arg0
, arg1
);
7616 /* Since the frexp function always expects base 2, and in
7617 GCC normalized significands are already in the range
7618 [0.5, 1.0), we have exactly what frexp wants. */
7619 REAL_VALUE_TYPE frac_rvt
= *value
;
7620 SET_REAL_EXP (&frac_rvt
, 0);
7621 frac
= build_real (rettype
, frac_rvt
);
7622 exp
= build_int_cst (integer_type_node
, REAL_EXP (value
));
7629 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7630 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
, exp
);
7631 TREE_SIDE_EFFECTS (arg1
) = 1;
7632 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
, frac
);
7638 /* Fold a call to builtin modf. */
7641 fold_builtin_modf (location_t loc
, tree arg0
, tree arg1
, tree rettype
)
7643 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
7648 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
7651 arg1
= build_fold_indirect_ref_loc (loc
, arg1
);
7653 /* Proceed if a valid pointer type was passed in. */
7654 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
7656 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
7657 REAL_VALUE_TYPE trunc
, frac
;
7663 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7664 trunc
= frac
= *value
;
7667 /* For +-Inf, return (*arg1 = arg0, +-0). */
7669 frac
.sign
= value
->sign
;
7673 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7674 real_trunc (&trunc
, VOIDmode
, value
);
7675 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
7676 /* If the original number was negative and already
7677 integral, then the fractional part is -0.0. */
7678 if (value
->sign
&& frac
.cl
== rvc_zero
)
7679 frac
.sign
= value
->sign
;
7683 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7684 arg1
= fold_build2_loc (loc
, MODIFY_EXPR
, rettype
, arg1
,
7685 build_real (rettype
, trunc
));
7686 TREE_SIDE_EFFECTS (arg1
) = 1;
7687 return fold_build2_loc (loc
, COMPOUND_EXPR
, rettype
, arg1
,
7688 build_real (rettype
, frac
));
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return a folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happens if there's an RTL instruction available).  */
7700 fold_builtin_interclass_mathfn (location_t loc
, tree fndecl
, tree arg
)
7704 if (!validate_arg (arg
, REAL_TYPE
))
7707 if (interclass_mathfn_icode (arg
, fndecl
) != CODE_FOR_nothing
)
7710 mode
= TYPE_MODE (TREE_TYPE (arg
));
7712 bool is_ibm_extended
= MODE_COMPOSITE_P (mode
);
7714 /* If there is no optab, try generic code. */
7715 switch (DECL_FUNCTION_CODE (fndecl
))
7719 CASE_FLT_FN (BUILT_IN_ISINF
):
7721 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7722 tree
const isgr_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
7723 tree type
= TREE_TYPE (arg
);
7727 if (is_ibm_extended
)
7729 /* NaN and Inf are encoded in the high-order double value
7730 only. The low-order value is not significant. */
7731 type
= double_type_node
;
7733 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
7735 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
7736 real_from_string (&r
, buf
);
7737 result
= build_call_expr (isgr_fn
, 2,
7738 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
7739 build_real (type
, r
));
7742 CASE_FLT_FN (BUILT_IN_FINITE
):
7743 case BUILT_IN_ISFINITE
:
7745 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7746 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
7747 tree type
= TREE_TYPE (arg
);
7751 if (is_ibm_extended
)
7753 /* NaN and Inf are encoded in the high-order double value
7754 only. The low-order value is not significant. */
7755 type
= double_type_node
;
7757 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
7759 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
7760 real_from_string (&r
, buf
);
7761 result
= build_call_expr (isle_fn
, 2,
7762 fold_build1_loc (loc
, ABS_EXPR
, type
, arg
),
7763 build_real (type
, r
));
7764 /*result = fold_build2_loc (loc, UNGT_EXPR,
7765 TREE_TYPE (TREE_TYPE (fndecl)),
7766 fold_build1_loc (loc, ABS_EXPR, type, arg),
7767 build_real (type, r));
7768 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7769 TREE_TYPE (TREE_TYPE (fndecl)),
7773 case BUILT_IN_ISNORMAL
:
7775 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7776 islessequal(fabs(x),DBL_MAX). */
7777 tree
const isle_fn
= builtin_decl_explicit (BUILT_IN_ISLESSEQUAL
);
7778 tree type
= TREE_TYPE (arg
);
7779 tree orig_arg
, max_exp
, min_exp
;
7780 machine_mode orig_mode
= mode
;
7781 REAL_VALUE_TYPE rmax
, rmin
;
7784 orig_arg
= arg
= builtin_save_expr (arg
);
7785 if (is_ibm_extended
)
7787 /* Use double to test the normal range of IBM extended
7788 precision. Emin for IBM extended precision is
7789 different to emin for IEEE double, being 53 higher
7790 since the low double exponent is at least 53 lower
7791 than the high double exponent. */
7792 type
= double_type_node
;
7794 arg
= fold_build1_loc (loc
, NOP_EXPR
, type
, arg
);
7796 arg
= fold_build1_loc (loc
, ABS_EXPR
, type
, arg
);
7798 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
7799 real_from_string (&rmax
, buf
);
7800 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (orig_mode
)->emin
- 1);
7801 real_from_string (&rmin
, buf
);
7802 max_exp
= build_real (type
, rmax
);
7803 min_exp
= build_real (type
, rmin
);
7805 max_exp
= build_call_expr (isle_fn
, 2, arg
, max_exp
);
7806 if (is_ibm_extended
)
7808 /* Testing the high end of the range is done just using
7809 the high double, using the same test as isfinite().
7810 For the subnormal end of the range we first test the
7811 high double, then if its magnitude is equal to the
7812 limit of 0x1p-969, we test whether the low double is
7813 non-zero and opposite sign to the high double. */
7814 tree
const islt_fn
= builtin_decl_explicit (BUILT_IN_ISLESS
);
7815 tree
const isgt_fn
= builtin_decl_explicit (BUILT_IN_ISGREATER
);
7816 tree gt_min
= build_call_expr (isgt_fn
, 2, arg
, min_exp
);
7817 tree eq_min
= fold_build2 (EQ_EXPR
, integer_type_node
,
7819 tree as_complex
= build1 (VIEW_CONVERT_EXPR
,
7820 complex_double_type_node
, orig_arg
);
7821 tree hi_dbl
= build1 (REALPART_EXPR
, type
, as_complex
);
7822 tree lo_dbl
= build1 (IMAGPART_EXPR
, type
, as_complex
);
7823 tree zero
= build_real (type
, dconst0
);
7824 tree hilt
= build_call_expr (islt_fn
, 2, hi_dbl
, zero
);
7825 tree lolt
= build_call_expr (islt_fn
, 2, lo_dbl
, zero
);
7826 tree logt
= build_call_expr (isgt_fn
, 2, lo_dbl
, zero
);
7827 tree ok_lo
= fold_build1 (TRUTH_NOT_EXPR
, integer_type_node
,
7828 fold_build3 (COND_EXPR
,
7831 eq_min
= fold_build2 (TRUTH_ANDIF_EXPR
, integer_type_node
,
7833 min_exp
= fold_build2 (TRUTH_ORIF_EXPR
, integer_type_node
,
7839 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL
);
7840 min_exp
= build_call_expr (isge_fn
, 2, arg
, min_exp
);
7842 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
,
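
/* Illustrative sketch (not from the GCC sources): the foldings above rewrite
   the classification builtins into quiet range comparisons against the
   largest/smallest normal values of the type.  The user-level check below
   assumes plain IEEE double, so the IBM extended-precision cases above do
   not apply.  */
#if 0
#include <float.h>
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double samples[] = { 0.0, 1.0, DBL_MIN, DBL_MIN / 4, DBL_MAX,
                       INFINITY, -INFINITY, NAN };
  for (unsigned i = 0; i < sizeof samples / sizeof samples[0]; i++)
    {
      double x = samples[i];
      /* isinf (x)    <=> isgreater (fabs (x), DBL_MAX)
         isfinite (x) <=> islessequal (fabs (x), DBL_MAX)
         isnormal (x) <=> isgreaterequal (fabs (x), DBL_MIN)
                          & islessequal (fabs (x), DBL_MAX)  */
      printf ("%-8g isinf=%d/%d isfinite=%d/%d isnormal=%d/%d\n", x,
              isinf (x) != 0, isgreater (fabs (x), DBL_MAX),
              isfinite (x) != 0, islessequal (fabs (x), DBL_MAX),
              isnormal (x) != 0,
              isgreaterequal (fabs (x), DBL_MIN)
              & islessequal (fabs (x), DBL_MAX));
    }
  return 0;
}
#endif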
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call.  */
7857 fold_builtin_classify (location_t loc
, tree fndecl
, tree arg
, int builtin_index
)
7859 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7861 if (!validate_arg (arg
, REAL_TYPE
))
7864 switch (builtin_index
)
7866 case BUILT_IN_ISINF
:
7867 if (!HONOR_INFINITIES (arg
))
7868 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
7872 case BUILT_IN_ISINF_SIGN
:
7874 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7875 /* In a boolean context, GCC will fold the inner COND_EXPR to
7876 1. So e.g. "if (isinf_sign(x))" would be folded to just
7877 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7878 tree signbit_fn
= builtin_decl_explicit (BUILT_IN_SIGNBIT
);
7879 tree isinf_fn
= builtin_decl_explicit (BUILT_IN_ISINF
);
7880 tree tmp
= NULL_TREE
;
7882 arg
= builtin_save_expr (arg
);
7884 if (signbit_fn
&& isinf_fn
)
7886 tree signbit_call
= build_call_expr_loc (loc
, signbit_fn
, 1, arg
);
7887 tree isinf_call
= build_call_expr_loc (loc
, isinf_fn
, 1, arg
);
7889 signbit_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
7890 signbit_call
, integer_zero_node
);
7891 isinf_call
= fold_build2_loc (loc
, NE_EXPR
, integer_type_node
,
7892 isinf_call
, integer_zero_node
);
7894 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, signbit_call
,
7895 integer_minus_one_node
, integer_one_node
);
7896 tmp
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
7904 case BUILT_IN_ISFINITE
:
7905 if (!HONOR_NANS (arg
)
7906 && !HONOR_INFINITIES (arg
))
7907 return omit_one_operand_loc (loc
, type
, integer_one_node
, arg
);
7911 case BUILT_IN_ISNAN
:
7912 if (!HONOR_NANS (arg
))
7913 return omit_one_operand_loc (loc
, type
, integer_zero_node
, arg
);
7916 bool is_ibm_extended
= MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg
)));
7917 if (is_ibm_extended
)
7919 /* NaN and Inf are encoded in the high-order double value
7920 only. The low-order value is not significant. */
7921 arg
= fold_build1_loc (loc
, NOP_EXPR
, double_type_node
, arg
);
7924 arg
= builtin_save_expr (arg
);
7925 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg
, arg
);
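
/* Illustrative sketch (not from the GCC sources): isnan is folded above into
   an unordered self-comparison, and __builtin_isinf_sign expands to the
   nested conditional described in the comment.  A quick user-level check of
   both equivalences (GCC-specific builtins assumed available):  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double samples[] = { 3.5, -0.0, INFINITY, -INFINITY, NAN };
  for (unsigned i = 0; i < sizeof samples / sizeof samples[0]; i++)
    {
      double x = samples[i];
      /* isnan (x) behaves like an unordered comparison of x with itself.  */
      int isnan_folded = __builtin_isunordered (x, x);
      /* isinf_sign (x) -> isinf (x) ? (signbit (x) ? -1 : 1) : 0.  */
      int isinf_sign_manual = isinf (x) ? (signbit (x) ? -1 : 1) : 0;
      printf ("%-8g isnan=%d folded=%d  isinf_sign=%d manual=%d\n",
              x, isnan (x) != 0, isnan_folded,
              __builtin_isinf_sign (x), isinf_sign_manual);
    }
  return 0;
}
#endif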
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */
7941 fold_builtin_fpclassify (location_t loc
, tree
*args
, int nargs
)
7943 tree fp_nan
, fp_infinite
, fp_normal
, fp_subnormal
, fp_zero
,
7944 arg
, type
, res
, tmp
;
7949 /* Verify the required arguments in the original call. */
7951 || !validate_arg (args
[0], INTEGER_TYPE
)
7952 || !validate_arg (args
[1], INTEGER_TYPE
)
7953 || !validate_arg (args
[2], INTEGER_TYPE
)
7954 || !validate_arg (args
[3], INTEGER_TYPE
)
7955 || !validate_arg (args
[4], INTEGER_TYPE
)
7956 || !validate_arg (args
[5], REAL_TYPE
))
7960 fp_infinite
= args
[1];
7961 fp_normal
= args
[2];
7962 fp_subnormal
= args
[3];
7965 type
= TREE_TYPE (arg
);
7966 mode
= TYPE_MODE (type
);
7967 arg
= builtin_save_expr (fold_build1_loc (loc
, ABS_EXPR
, type
, arg
));
7971 (fabs(x) == Inf ? FP_INFINITE :
7972 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7973 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7975 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
7976 build_real (type
, dconst0
));
7977 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
,
7978 tmp
, fp_zero
, fp_subnormal
);
7980 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
7981 real_from_string (&r
, buf
);
7982 tmp
= fold_build2_loc (loc
, GE_EXPR
, integer_type_node
,
7983 arg
, build_real (type
, r
));
7984 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, fp_normal
, res
);
7986 if (HONOR_INFINITIES (mode
))
7989 tmp
= fold_build2_loc (loc
, EQ_EXPR
, integer_type_node
, arg
,
7990 build_real (type
, r
));
7991 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
,
7995 if (HONOR_NANS (mode
))
7997 tmp
= fold_build2_loc (loc
, ORDERED_EXPR
, integer_type_node
, arg
, arg
);
7998 res
= fold_build3_loc (loc
, COND_EXPR
, integer_type_node
, tmp
, res
, fp_nan
);
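
/* Illustrative sketch (not from the GCC sources): the nested COND_EXPRs built
   above are equivalent to the classification below, applied innermost-first
   (zero/subnormal, then normal, then infinite, then NaN), assuming IEEE
   double where 0x1p-1022 is DBL_MIN.  */
#if 0
#include <float.h>
#include <math.h>
#include <stdio.h>

static int
fpclassify_manual (double x)
{
  double a = fabs (x);
  int res = (a == 0.0) ? FP_ZERO : FP_SUBNORMAL;
  if (isgreaterequal (a, DBL_MIN))   /* 0x1p-1022, the smallest normal.  */
    res = FP_NORMAL;
  if (a == INFINITY)
    res = FP_INFINITE;
  if (__builtin_isunordered (a, a))  /* a is a NaN.  */
    res = FP_NAN;
  return res;
}

int
main (void)
{
  double samples[] = { 0.0, DBL_MIN / 2, 1.0, INFINITY, NAN };
  for (unsigned i = 0; i < sizeof samples / sizeof samples[0]; i++)
    printf ("%-8g fpclassify=%d manual=%d\n", samples[i],
            fpclassify (samples[i]), fpclassify_manual (samples[i]));
  return 0;
}
#endif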
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */
8013 fold_builtin_unordered_cmp (location_t loc
, tree fndecl
, tree arg0
, tree arg1
,
8014 enum tree_code unordered_code
,
8015 enum tree_code ordered_code
)
8017 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8018 enum tree_code code
;
8020 enum tree_code code0
, code1
;
8021 tree cmp_type
= NULL_TREE
;
8023 type0
= TREE_TYPE (arg0
);
8024 type1
= TREE_TYPE (arg1
);
8026 code0
= TREE_CODE (type0
);
8027 code1
= TREE_CODE (type1
);
8029 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
8030 /* Choose the wider of two real types. */
8031 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
8033 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
8035 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
8038 arg0
= fold_convert_loc (loc
, cmp_type
, arg0
);
8039 arg1
= fold_convert_loc (loc
, cmp_type
, arg1
);
8041 if (unordered_code
== UNORDERED_EXPR
)
8043 if (!HONOR_NANS (arg0
))
8044 return omit_two_operands_loc (loc
, type
, integer_zero_node
, arg0
, arg1
);
8045 return fold_build2_loc (loc
, UNORDERED_EXPR
, type
, arg0
, arg1
);
8048 code
= HONOR_NANS (arg0
) ? unordered_code
: ordered_code
;
8049 return fold_build1_loc (loc
, TRUTH_NOT_EXPR
, type
,
8050 fold_build2_loc (loc
, code
, type
, arg0
, arg1
));
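
/* Illustrative sketch (not from the GCC sources): each macro is implemented
   above as the logical negation of the "opposite" unordered comparison, so
   e.g. isgreater (x, y) becomes !(x unordered-or-less-equal y) and is quietly
   false whenever either operand is a NaN.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  double pairs[][2] = { { 1.0, 2.0 }, { 2.0, 1.0 }, { 1.0, 1.0 },
                        { NAN, 1.0 } };
  for (unsigned i = 0; i < sizeof pairs / sizeof pairs[0]; i++)
    {
      double x = pairs[i][0], y = pairs[i][1];
      /* "unle" spelled out as isunordered || islessequal; isgreater is
         its negation, and both are 0 when a NaN is involved.  */
      printf ("x=%-4g y=%-4g isgreater=%d !unle=%d isunordered=%d\n",
              x, y, isgreater (x, y),
              !(__builtin_isunordered (x, y) || islessequal (x, y)),
              isunordered (x, y));
    }
  return 0;
}
#endif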
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */
8061 fold_builtin_arith_overflow (location_t loc
, enum built_in_function fcode
,
8062 tree arg0
, tree arg1
, tree arg2
)
8064 enum internal_fn ifn
= IFN_LAST
;
8065 /* The code of the expression corresponding to the type-generic
8066 built-in, or ERROR_MARK for the type-specific ones. */
8067 enum tree_code opcode
= ERROR_MARK
;
8068 bool ovf_only
= false;
8072 case BUILT_IN_ADD_OVERFLOW_P
:
8075 case BUILT_IN_ADD_OVERFLOW
:
8078 case BUILT_IN_SADD_OVERFLOW
:
8079 case BUILT_IN_SADDL_OVERFLOW
:
8080 case BUILT_IN_SADDLL_OVERFLOW
:
8081 case BUILT_IN_UADD_OVERFLOW
:
8082 case BUILT_IN_UADDL_OVERFLOW
:
8083 case BUILT_IN_UADDLL_OVERFLOW
:
8084 ifn
= IFN_ADD_OVERFLOW
;
8086 case BUILT_IN_SUB_OVERFLOW_P
:
8089 case BUILT_IN_SUB_OVERFLOW
:
8090 opcode
= MINUS_EXPR
;
8092 case BUILT_IN_SSUB_OVERFLOW
:
8093 case BUILT_IN_SSUBL_OVERFLOW
:
8094 case BUILT_IN_SSUBLL_OVERFLOW
:
8095 case BUILT_IN_USUB_OVERFLOW
:
8096 case BUILT_IN_USUBL_OVERFLOW
:
8097 case BUILT_IN_USUBLL_OVERFLOW
:
8098 ifn
= IFN_SUB_OVERFLOW
;
8100 case BUILT_IN_MUL_OVERFLOW_P
:
8103 case BUILT_IN_MUL_OVERFLOW
:
8106 case BUILT_IN_SMUL_OVERFLOW
:
8107 case BUILT_IN_SMULL_OVERFLOW
:
8108 case BUILT_IN_SMULLL_OVERFLOW
:
8109 case BUILT_IN_UMUL_OVERFLOW
:
8110 case BUILT_IN_UMULL_OVERFLOW
:
8111 case BUILT_IN_UMULLL_OVERFLOW
:
8112 ifn
= IFN_MUL_OVERFLOW
;
8118 /* For the "generic" overloads, the first two arguments can have different
8119 types and the last argument determines the target type to use to check
8120 for overflow. The arguments of the other overloads all have the same
8122 tree type
= ovf_only
? TREE_TYPE (arg2
) : TREE_TYPE (TREE_TYPE (arg2
));
8124 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8125 arguments are constant, attempt to fold the built-in call into a constant
8126 expression indicating whether or not it detected an overflow. */
8128 && TREE_CODE (arg0
) == INTEGER_CST
8129 && TREE_CODE (arg1
) == INTEGER_CST
)
8130 /* Perform the computation in the target type and check for overflow. */
8131 return omit_one_operand_loc (loc
, boolean_type_node
,
8132 arith_overflowed_p (opcode
, type
, arg0
, arg1
)
8133 ? boolean_true_node
: boolean_false_node
,
8136 tree ctype
= build_complex_type (type
);
8137 tree call
= build_call_expr_internal_loc (loc
, ifn
, ctype
,
8139 tree tgt
= save_expr (call
);
8140 tree intres
= build1_loc (loc
, REALPART_EXPR
, type
, tgt
);
8141 tree ovfres
= build1_loc (loc
, IMAGPART_EXPR
, type
, tgt
);
8142 ovfres
= fold_convert_loc (loc
, boolean_type_node
, ovfres
);
8145 return omit_one_operand_loc (loc
, boolean_type_node
, ovfres
, arg2
);
8147 tree mem_arg2
= build_fold_indirect_ref_loc (loc
, arg2
);
8149 = fold_build2_loc (loc
, MODIFY_EXPR
, void_type_node
, mem_arg2
, intres
);
8150 return build2_loc (loc
, COMPOUND_EXPR
, boolean_type_node
, store
, ovfres
);
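
/* Illustrative sketch (not from the GCC sources): the type-generic overflow
   builtins folded above either become an internal call whose REALPART is the
   arithmetic result and whose IMAGPART is the overflow flag, or, for the _p
   variants with constant operands, a compile-time true/false.  From the
   user's side:  */
#if 0
#include <limits.h>
#include <stdio.h>

int
main (void)
{
  int r;
  /* The result is stored through the third argument; the return value is
     the overflow flag.  */
  if (__builtin_add_overflow (INT_MAX, 1, &r))
    printf ("INT_MAX + 1 overflows int (wrapped value %d)\n", r);

  /* The _p form only computes the flag; its third argument supplies the
     target type and is never written to.  */
  printf ("would 100000 * 100000 overflow int? %d\n",
          __builtin_mul_overflow_p (100000, 100000, (int) 0));
  return 0;
}
#endif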
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    return build_string_literal (strlen (fname) + 1, fname);

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  if (current_function_decl)
    {
      const char *name
	= IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
      return build_string_literal (strlen (name) + 1, name);
    }

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}
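
/* Illustrative sketch (not from the GCC sources): unlike the preprocessor's
   __FILE__/__LINE__, these builtins are expanded where the call expression
   is folded, which is why they are useful as C++ default arguments that
   report the caller's location.  A minimal C use:  */
#if 0
#include <stdio.h>

static void
log_here (void)
{
  /* Each builtin folds to a constant string literal or integer constant
     describing this call site.  */
  printf ("%s:%d in %s\n", __builtin_FILE (), __builtin_LINE (),
          __builtin_FUNCTION ());
}

int
main (void)
{
  log_here ();
  return 0;
}
#endif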
8186 /* Fold a call to built-in function FNDECL with 0 arguments.
8187 This function returns NULL_TREE if no simplification was possible. */
8190 fold_builtin_0 (location_t loc
, tree fndecl
)
8192 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8193 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8197 return fold_builtin_FILE (loc
);
8199 case BUILT_IN_FUNCTION
:
8200 return fold_builtin_FUNCTION ();
8203 return fold_builtin_LINE (loc
, type
);
8205 CASE_FLT_FN (BUILT_IN_INF
):
8206 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF
):
8207 case BUILT_IN_INFD32
:
8208 case BUILT_IN_INFD64
:
8209 case BUILT_IN_INFD128
:
8210 return fold_builtin_inf (loc
, type
, true);
8212 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
8213 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL
):
8214 return fold_builtin_inf (loc
, type
, false);
8216 case BUILT_IN_CLASSIFY_TYPE
:
8217 return fold_builtin_classify_type (NULL_TREE
);
8225 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8226 This function returns NULL_TREE if no simplification was possible. */
8229 fold_builtin_1 (location_t loc
, tree fndecl
, tree arg0
)
8231 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8232 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8234 if (TREE_CODE (arg0
) == ERROR_MARK
)
8237 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
))
8242 case BUILT_IN_CONSTANT_P
:
8244 tree val
= fold_builtin_constant_p (arg0
);
8246 /* Gimplification will pull the CALL_EXPR for the builtin out of
8247 an if condition. When not optimizing, we'll not CSE it back.
8248 To avoid link error types of regressions, return false now. */
8249 if (!val
&& !optimize
)
8250 val
= integer_zero_node
;
8255 case BUILT_IN_CLASSIFY_TYPE
:
8256 return fold_builtin_classify_type (arg0
);
8258 case BUILT_IN_STRLEN
:
8259 return fold_builtin_strlen (loc
, type
, arg0
);
8261 CASE_FLT_FN (BUILT_IN_FABS
):
8262 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS
):
8263 case BUILT_IN_FABSD32
:
8264 case BUILT_IN_FABSD64
:
8265 case BUILT_IN_FABSD128
:
8266 return fold_builtin_fabs (loc
, arg0
, type
);
8270 case BUILT_IN_LLABS
:
8271 case BUILT_IN_IMAXABS
:
8272 return fold_builtin_abs (loc
, arg0
, type
);
8274 CASE_FLT_FN (BUILT_IN_CONJ
):
8275 if (validate_arg (arg0
, COMPLEX_TYPE
)
8276 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8277 return fold_build1_loc (loc
, CONJ_EXPR
, type
, arg0
);
8280 CASE_FLT_FN (BUILT_IN_CREAL
):
8281 if (validate_arg (arg0
, COMPLEX_TYPE
)
8282 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8283 return non_lvalue_loc (loc
, fold_build1_loc (loc
, REALPART_EXPR
, type
, arg0
));
8286 CASE_FLT_FN (BUILT_IN_CIMAG
):
8287 if (validate_arg (arg0
, COMPLEX_TYPE
)
8288 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0
))) == REAL_TYPE
)
8289 return non_lvalue_loc (loc
, fold_build1_loc (loc
, IMAGPART_EXPR
, type
, arg0
));
8292 CASE_FLT_FN (BUILT_IN_CARG
):
8293 return fold_builtin_carg (loc
, arg0
, type
);
8295 case BUILT_IN_ISASCII
:
8296 return fold_builtin_isascii (loc
, arg0
);
8298 case BUILT_IN_TOASCII
:
8299 return fold_builtin_toascii (loc
, arg0
);
8301 case BUILT_IN_ISDIGIT
:
8302 return fold_builtin_isdigit (loc
, arg0
);
8304 CASE_FLT_FN (BUILT_IN_FINITE
):
8305 case BUILT_IN_FINITED32
:
8306 case BUILT_IN_FINITED64
:
8307 case BUILT_IN_FINITED128
:
8308 case BUILT_IN_ISFINITE
:
8310 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISFINITE
);
8313 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8316 CASE_FLT_FN (BUILT_IN_ISINF
):
8317 case BUILT_IN_ISINFD32
:
8318 case BUILT_IN_ISINFD64
:
8319 case BUILT_IN_ISINFD128
:
8321 tree ret
= fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF
);
8324 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8327 case BUILT_IN_ISNORMAL
:
8328 return fold_builtin_interclass_mathfn (loc
, fndecl
, arg0
);
8330 case BUILT_IN_ISINF_SIGN
:
8331 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISINF_SIGN
);
8333 CASE_FLT_FN (BUILT_IN_ISNAN
):
8334 case BUILT_IN_ISNAND32
:
8335 case BUILT_IN_ISNAND64
:
8336 case BUILT_IN_ISNAND128
:
8337 return fold_builtin_classify (loc
, fndecl
, arg0
, BUILT_IN_ISNAN
);
8340 if (integer_zerop (arg0
))
8341 return build_empty_stmt (loc
);
8352 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8353 This function returns NULL_TREE if no simplification was possible. */
8356 fold_builtin_2 (location_t loc
, tree fndecl
, tree arg0
, tree arg1
)
8358 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8359 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8361 if (TREE_CODE (arg0
) == ERROR_MARK
8362 || TREE_CODE (arg1
) == ERROR_MARK
)
8365 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
, arg0
, arg1
))
8370 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
8371 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
8372 if (validate_arg (arg0
, REAL_TYPE
)
8373 && validate_arg (arg1
, POINTER_TYPE
))
8374 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
8377 CASE_FLT_FN (BUILT_IN_FREXP
):
8378 return fold_builtin_frexp (loc
, arg0
, arg1
, type
);
8380 CASE_FLT_FN (BUILT_IN_MODF
):
8381 return fold_builtin_modf (loc
, arg0
, arg1
, type
);
8383 case BUILT_IN_STRSTR
:
8384 return fold_builtin_strstr (loc
, arg0
, arg1
, type
);
8386 case BUILT_IN_STRSPN
:
8387 return fold_builtin_strspn (loc
, arg0
, arg1
);
8389 case BUILT_IN_STRCSPN
:
8390 return fold_builtin_strcspn (loc
, arg0
, arg1
);
8392 case BUILT_IN_STRCMP
:
8393 return fold_builtin_strcmp (loc
, arg0
, arg1
);
8395 case BUILT_IN_STRPBRK
:
8396 return fold_builtin_strpbrk (loc
, arg0
, arg1
, type
);
8398 case BUILT_IN_EXPECT
:
8399 return fold_builtin_expect (loc
, arg0
, arg1
, NULL_TREE
);
8401 case BUILT_IN_ISGREATER
:
8402 return fold_builtin_unordered_cmp (loc
, fndecl
,
8403 arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
8404 case BUILT_IN_ISGREATEREQUAL
:
8405 return fold_builtin_unordered_cmp (loc
, fndecl
,
8406 arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
8407 case BUILT_IN_ISLESS
:
8408 return fold_builtin_unordered_cmp (loc
, fndecl
,
8409 arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
8410 case BUILT_IN_ISLESSEQUAL
:
8411 return fold_builtin_unordered_cmp (loc
, fndecl
,
8412 arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
8413 case BUILT_IN_ISLESSGREATER
:
8414 return fold_builtin_unordered_cmp (loc
, fndecl
,
8415 arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
8416 case BUILT_IN_ISUNORDERED
:
8417 return fold_builtin_unordered_cmp (loc
, fndecl
,
8418 arg0
, arg1
, UNORDERED_EXPR
,
8421 /* We do the folding for va_start in the expander. */
8422 case BUILT_IN_VA_START
:
8425 case BUILT_IN_OBJECT_SIZE
:
8426 return fold_builtin_object_size (arg0
, arg1
);
8428 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE
:
8429 return fold_builtin_atomic_always_lock_free (arg0
, arg1
);
8431 case BUILT_IN_ATOMIC_IS_LOCK_FREE
:
8432 return fold_builtin_atomic_is_lock_free (arg0
, arg1
);
8440 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8442 This function returns NULL_TREE if no simplification was possible. */
8445 fold_builtin_3 (location_t loc
, tree fndecl
,
8446 tree arg0
, tree arg1
, tree arg2
)
8448 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8449 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
8451 if (TREE_CODE (arg0
) == ERROR_MARK
8452 || TREE_CODE (arg1
) == ERROR_MARK
8453 || TREE_CODE (arg2
) == ERROR_MARK
)
8456 if (tree ret
= fold_const_call (as_combined_fn (fcode
), type
,
8463 CASE_FLT_FN (BUILT_IN_SINCOS
):
8464 return fold_builtin_sincos (loc
, arg0
, arg1
, arg2
);
8466 CASE_FLT_FN (BUILT_IN_FMA
):
8467 return fold_builtin_fma (loc
, arg0
, arg1
, arg2
, type
);
8469 CASE_FLT_FN (BUILT_IN_REMQUO
):
8470 if (validate_arg (arg0
, REAL_TYPE
)
8471 && validate_arg (arg1
, REAL_TYPE
)
8472 && validate_arg (arg2
, POINTER_TYPE
))
8473 return do_mpfr_remquo (arg0
, arg1
, arg2
);
8476 case BUILT_IN_STRNCMP
:
8477 return fold_builtin_strncmp (loc
, arg0
, arg1
, arg2
);
8479 case BUILT_IN_MEMCHR
:
8480 return fold_builtin_memchr (loc
, arg0
, arg1
, arg2
, type
);
8483 case BUILT_IN_MEMCMP
:
8484 return fold_builtin_memcmp (loc
, arg0
, arg1
, arg2
);;
8486 case BUILT_IN_EXPECT
:
8487 return fold_builtin_expect (loc
, arg0
, arg1
, arg2
);
8489 case BUILT_IN_ADD_OVERFLOW
:
8490 case BUILT_IN_SUB_OVERFLOW
:
8491 case BUILT_IN_MUL_OVERFLOW
:
8492 case BUILT_IN_ADD_OVERFLOW_P
:
8493 case BUILT_IN_SUB_OVERFLOW_P
:
8494 case BUILT_IN_MUL_OVERFLOW_P
:
8495 case BUILT_IN_SADD_OVERFLOW
:
8496 case BUILT_IN_SADDL_OVERFLOW
:
8497 case BUILT_IN_SADDLL_OVERFLOW
:
8498 case BUILT_IN_SSUB_OVERFLOW
:
8499 case BUILT_IN_SSUBL_OVERFLOW
:
8500 case BUILT_IN_SSUBLL_OVERFLOW
:
8501 case BUILT_IN_SMUL_OVERFLOW
:
8502 case BUILT_IN_SMULL_OVERFLOW
:
8503 case BUILT_IN_SMULLL_OVERFLOW
:
8504 case BUILT_IN_UADD_OVERFLOW
:
8505 case BUILT_IN_UADDL_OVERFLOW
:
8506 case BUILT_IN_UADDLL_OVERFLOW
:
8507 case BUILT_IN_USUB_OVERFLOW
:
8508 case BUILT_IN_USUBL_OVERFLOW
:
8509 case BUILT_IN_USUBLL_OVERFLOW
:
8510 case BUILT_IN_UMUL_OVERFLOW
:
8511 case BUILT_IN_UMULL_OVERFLOW
:
8512 case BUILT_IN_UMULLL_OVERFLOW
:
8513 return fold_builtin_arith_overflow (loc
, fcode
, arg0
, arg1
, arg2
);
8521 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8522 arguments. IGNORE is true if the result of the
8523 function call is ignored. This function returns NULL_TREE if no
8524 simplification was possible. */
8527 fold_builtin_n (location_t loc
, tree fndecl
, tree
*args
, int nargs
, bool)
8529 tree ret
= NULL_TREE
;
8534 ret
= fold_builtin_0 (loc
, fndecl
);
8537 ret
= fold_builtin_1 (loc
, fndecl
, args
[0]);
8540 ret
= fold_builtin_2 (loc
, fndecl
, args
[0], args
[1]);
8543 ret
= fold_builtin_3 (loc
, fndecl
, args
[0], args
[1], args
[2]);
8546 ret
= fold_builtin_varargs (loc
, fndecl
, args
, nargs
);
8551 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
8552 SET_EXPR_LOCATION (ret
, loc
);
8553 TREE_NO_WARNING (ret
) = 1;
8559 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8560 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8561 of arguments in ARGS to be omitted. OLDNARGS is the number of
8562 elements in ARGS. */
8565 rewrite_call_expr_valist (location_t loc
, int oldnargs
, tree
*args
,
8566 int skip
, tree fndecl
, int n
, va_list newargs
)
8568 int nargs
= oldnargs
- skip
+ n
;
8575 buffer
= XALLOCAVEC (tree
, nargs
);
8576 for (i
= 0; i
< n
; i
++)
8577 buffer
[i
] = va_arg (newargs
, tree
);
8578 for (j
= skip
; j
< oldnargs
; j
++, i
++)
8579 buffer
[i
] = args
[j
];
8582 buffer
= args
+ skip
;
8584 return build_call_expr_loc_array (loc
, fndecl
, nargs
, buffer
);
/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it until after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
	  && cfun
	  && !cfun->always_inline_functions_inlined
	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}
8603 /* A wrapper function for builtin folding that prevents warnings for
8604 "statement without effect" and the like, caused by removing the
8605 call node earlier than the warning is generated. */
8608 fold_call_expr (location_t loc
, tree exp
, bool ignore
)
8610 tree ret
= NULL_TREE
;
8611 tree fndecl
= get_callee_fndecl (exp
);
8613 && TREE_CODE (fndecl
) == FUNCTION_DECL
8614 && DECL_BUILT_IN (fndecl
)
8615 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8616 yet. Defer folding until we see all the arguments
8617 (after inlining). */
8618 && !CALL_EXPR_VA_ARG_PACK (exp
))
8620 int nargs
= call_expr_nargs (exp
);
8622 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8623 instead last argument is __builtin_va_arg_pack (). Defer folding
8624 even in that case, until arguments are finalized. */
8625 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
8627 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
8629 && TREE_CODE (fndecl2
) == FUNCTION_DECL
8630 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
8631 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
8635 if (avoid_folding_inline_builtin (fndecl
))
8638 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
8639 return targetm
.fold_builtin (fndecl
, call_expr_nargs (exp
),
8640 CALL_EXPR_ARGP (exp
), ignore
);
8643 tree
*args
= CALL_EXPR_ARGP (exp
);
8644 ret
= fold_builtin_n (loc
, fndecl
, args
, nargs
, ignore
);
8652 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8653 N arguments are passed in the array ARGARRAY. Return a folded
8654 expression or NULL_TREE if no simplification was possible. */
8657 fold_builtin_call_array (location_t loc
, tree
,
8662 if (TREE_CODE (fn
) != ADDR_EXPR
)
8665 tree fndecl
= TREE_OPERAND (fn
, 0);
8666 if (TREE_CODE (fndecl
) == FUNCTION_DECL
8667 && DECL_BUILT_IN (fndecl
))
8669 /* If last argument is __builtin_va_arg_pack (), arguments to this
8670 function are not finalized yet. Defer folding until they are. */
8671 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
8673 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
8675 && TREE_CODE (fndecl2
) == FUNCTION_DECL
8676 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
8677 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
8680 if (avoid_folding_inline_builtin (fndecl
))
8682 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
8683 return targetm
.fold_builtin (fndecl
, n
, argarray
, false);
8685 return fold_builtin_n (loc
, fndecl
, argarray
, n
, false);
8691 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8692 along with N new arguments specified as the "..." parameters. SKIP
8693 is the number of arguments in EXP to be omitted. This function is used
8694 to do varargs-to-varargs transformations. */
8697 rewrite_call_expr (location_t loc
, tree exp
, int skip
, tree fndecl
, int n
, ...)
8703 t
= rewrite_call_expr_valist (loc
, call_expr_nargs (exp
),
8704 CALL_EXPR_ARGP (exp
), skip
, fndecl
, n
, ap
);
/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}
8725 /* This function validates the types of a function call argument list
8726 against a specified list of tree_codes. If the last specifier is a 0,
8727 that represents an ellipses, otherwise the last specifier must be a
8730 This is the GIMPLE version of validate_arglist. Eventually we want to
8731 completely convert builtins.c to work from GIMPLEs and the tree based
8732 validate_arglist will then be removed. */
8735 validate_gimple_arglist (const gcall
*call
, ...)
8737 enum tree_code code
;
8743 va_start (ap
, call
);
8748 code
= (enum tree_code
) va_arg (ap
, int);
8752 /* This signifies an ellipses, any further arguments are all ok. */
8756 /* This signifies an endlink, if no arguments remain, return
8757 true, otherwise return false. */
8758 res
= (i
== gimple_call_num_args (call
));
8761 /* If no parameters remain or the parameter's code does not
8762 match the specified code, return false. Otherwise continue
8763 checking any remaining arguments. */
8764 arg
= gimple_call_arg (call
, i
++);
8765 if (!validate_arg (arg
, code
))
8772 /* We need gotos here since we can only have one VA_CLOSE in a
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     handle).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}
8818 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8819 to the call, and TYPE is its return type.
8821 Return NULL_TREE if no simplification was possible, otherwise return the
8822 simplified form of the call as a tree.
8824 The simplified form may be a constant or other expression which
8825 computes the same value, but in a more efficient manner (including
8826 calls to other builtin functions).
8828 The call may contain arguments which need to be evaluated, but
8829 which are not useful to determine the result of the call. In
8830 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8831 COMPOUND_EXPR will be an argument which must be evaluated.
8832 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8833 COMPOUND_EXPR in the chain will contain the tree for the simplified
8834 form of the builtin function call. */
8837 fold_builtin_strstr (location_t loc
, tree s1
, tree s2
, tree type
)
8839 if (!validate_arg (s1
, POINTER_TYPE
)
8840 || !validate_arg (s2
, POINTER_TYPE
))
8845 const char *p1
, *p2
;
8854 const char *r
= strstr (p1
, p2
);
8858 return build_int_cst (TREE_TYPE (s1
), 0);
8860 /* Return an offset into the constant string argument. */
8861 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
8862 return fold_convert_loc (loc
, type
, tem
);
8865 /* The argument is const char *, and the result is char *, so we need
8866 a type conversion here to avoid a warning. */
8868 return fold_convert_loc (loc
, type
, s1
);
8873 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
8877 /* New argument list transforming strstr(s1, s2) to
8878 strchr(s1, s2[0]). */
8879 return build_call_expr_loc (loc
, fn
, 2, s1
,
8880 build_int_cst (integer_type_node
, p2
[0]));
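
/* Illustrative sketch (not from the GCC sources): the transformation above
   rewrites a strstr call whose second argument is a known one-character
   string into the cheaper strchr, e.g.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  const char *s = "builtins.c";
  /* With a literal ".", the call may be folded exactly like
     strchr (s, '.'); both return a pointer to the first '.'.  */
  printf ("%s\n", strstr (s, "."));   /* prints ".c" */
  printf ("%s\n", strchr (s, '.'));   /* prints ".c" */
  return 0;
}
#endif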
8884 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8885 to the call, and TYPE is its return type.
8887 Return NULL_TREE if no simplification was possible, otherwise return the
8888 simplified form of the call as a tree.
8890 The simplified form may be a constant or other expression which
8891 computes the same value, but in a more efficient manner (including
8892 calls to other builtin functions).
8894 The call may contain arguments which need to be evaluated, but
8895 which are not useful to determine the result of the call. In
8896 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8897 COMPOUND_EXPR will be an argument which must be evaluated.
8898 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8899 COMPOUND_EXPR in the chain will contain the tree for the simplified
8900 form of the builtin function call. */
8903 fold_builtin_strpbrk (location_t loc
, tree s1
, tree s2
, tree type
)
8905 if (!validate_arg (s1
, POINTER_TYPE
)
8906 || !validate_arg (s2
, POINTER_TYPE
))
8911 const char *p1
, *p2
;
8920 const char *r
= strpbrk (p1
, p2
);
8924 return build_int_cst (TREE_TYPE (s1
), 0);
8926 /* Return an offset into the constant string argument. */
8927 tem
= fold_build_pointer_plus_hwi_loc (loc
, s1
, r
- p1
);
8928 return fold_convert_loc (loc
, type
, tem
);
8932 /* strpbrk(x, "") == NULL.
8933 Evaluate and ignore s1 in case it had side-effects. */
8934 return omit_one_operand_loc (loc
, TREE_TYPE (s1
), integer_zero_node
, s1
);
8937 return NULL_TREE
; /* Really call strpbrk. */
8939 fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
8943 /* New argument list transforming strpbrk(s1, s2) to
8944 strchr(s1, s2[0]). */
8945 return build_call_expr_loc (loc
, fn
, 2, s1
,
8946 build_int_cst (integer_type_node
, p2
[0]));
8950 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8953 Return NULL_TREE if no simplification was possible, otherwise return the
8954 simplified form of the call as a tree.
8956 The simplified form may be a constant or other expression which
8957 computes the same value, but in a more efficient manner (including
8958 calls to other builtin functions).
8960 The call may contain arguments which need to be evaluated, but
8961 which are not useful to determine the result of the call. In
8962 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8963 COMPOUND_EXPR will be an argument which must be evaluated.
8964 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8965 COMPOUND_EXPR in the chain will contain the tree for the simplified
8966 form of the builtin function call. */
8969 fold_builtin_strspn (location_t loc
, tree s1
, tree s2
)
8971 if (!validate_arg (s1
, POINTER_TYPE
)
8972 || !validate_arg (s2
, POINTER_TYPE
))
8976 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
8978 /* If either argument is "", return NULL_TREE. */
8979 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
8980 /* Evaluate and ignore both arguments in case either one has
8982 return omit_two_operands_loc (loc
, size_type_node
, size_zero_node
,
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;

  /* If the first argument is "", return NULL_TREE.  */
  const char *p1 = c_getstr (s1);
  if (p1 && *p1 == '\0')
    {
      /* Evaluate and ignore argument s2 in case it has
	 side-effects.  */
      return omit_one_operand_loc (loc, size_type_node,
				   size_zero_node, s2);
    }

  /* If the second argument is "", return __builtin_strlen(s1).  */
  const char *p2 = c_getstr (s2);
  if (p2 && *p2 == '\0')
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return NULL_TREE;

      return build_call_expr_loc (loc, fn, 1, s1);
    }

  return NULL_TREE;
}
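
/* Illustrative sketch (not from the GCC sources): the special cases handled
   above follow directly from the library semantics; strcspn (s, "") scans
   the whole string (== strlen (s)), while an empty operand for the other
   combinations yields 0.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  printf ("%zu\n", strcspn ("abc", ""));   /* 3, i.e. strlen ("abc") */
  printf ("%zu\n", strcspn ("", "abc"));   /* 0 */
  printf ("%zu\n", strspn ("abc", ""));    /* 0 */
  printf ("%zu\n", strspn ("", "abc"));    /* 0 */
  return 0;
}
#endif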
9041 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9042 produced. False otherwise. This is done so that we don't output the error
9043 or warning twice or three times. */
9046 fold_builtin_next_arg (tree exp
, bool va_start_p
)
9048 tree fntype
= TREE_TYPE (current_function_decl
);
9049 int nargs
= call_expr_nargs (exp
);
9051 /* There is good chance the current input_location points inside the
9052 definition of the va_start macro (perhaps on the token for
9053 builtin) in a system header, so warnings will not be emitted.
9054 Use the location in real source code. */
9055 source_location current_location
=
9056 linemap_unwind_to_first_non_reserved_loc (line_table
, input_location
,
9059 if (!stdarg_p (fntype
))
9061 error ("%<va_start%> used in function with fixed args");
9067 if (va_start_p
&& (nargs
!= 2))
9069 error ("wrong number of arguments to function %<va_start%>");
9072 arg
= CALL_EXPR_ARG (exp
, 1);
9074 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9075 when we checked the arguments and if needed issued a warning. */
9080 /* Evidently an out of date version of <stdarg.h>; can't validate
9081 va_start's second argument, but can still work as intended. */
9082 warning_at (current_location
,
9084 "%<__builtin_next_arg%> called without an argument");
9089 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9092 arg
= CALL_EXPR_ARG (exp
, 0);
9095 if (TREE_CODE (arg
) == SSA_NAME
)
9096 arg
= SSA_NAME_VAR (arg
);
9098 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9099 or __builtin_next_arg (0) the first time we see it, after checking
9100 the arguments and if needed issuing a warning. */
9101 if (!integer_zerop (arg
))
9103 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
9105 /* Strip off all nops for the sake of the comparison. This
9106 is not quite the same as STRIP_NOPS. It does more.
9107 We must also strip off INDIRECT_EXPR for C++ reference
9109 while (CONVERT_EXPR_P (arg
)
9110 || TREE_CODE (arg
) == INDIRECT_REF
)
9111 arg
= TREE_OPERAND (arg
, 0);
9112 if (arg
!= last_parm
)
9114 /* FIXME: Sometimes with the tree optimizers we can get the
9115 not the last argument even though the user used the last
9116 argument. We just warn and set the arg to be the last
9117 argument so that we will get wrong-code because of
9119 warning_at (current_location
,
9121 "second parameter of %<va_start%> not last named argument");
9124 /* Undefined by C99 7.15.1.4p4 (va_start):
9125 "If the parameter parmN is declared with the register storage
9126 class, with a function or array type, or with a type that is
9127 not compatible with the type that results after application of
9128 the default argument promotions, the behavior is undefined."
9130 else if (DECL_REGISTER (arg
))
9132 warning_at (current_location
,
9134 "undefined behavior when second parameter of "
9135 "%<va_start%> is declared with %<register%> storage");
9138 /* We want to verify the second parameter just once before the tree
9139 optimizers are run and then avoid keeping it in the tree,
9140 as otherwise we could warn even for correct code like:
9141 void foo (int i, ...)
9142 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9144 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
9146 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
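
/* Illustrative sketch (not from the GCC sources): the checks above enforce
   the C requirement that va_start name the last declared parameter of a
   variadic function; anything else draws the warning emitted here and is
   undefined behaviour.  */
#if 0
#include <stdarg.h>
#include <stdio.h>

static int
sum (int count, ...)
{
  va_list ap;
  int total = 0;

  va_start (ap, count);      /* OK: 'count' is the last named parameter.  */
  for (int i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}

int
main (void)
{
  printf ("%d\n", sum (3, 1, 2, 3));   /* prints 6 */
  return 0;
}
#endif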
9152 /* Expand a call EXP to __builtin_object_size. */
9155 expand_builtin_object_size (tree exp
)
9158 int object_size_type
;
9159 tree fndecl
= get_callee_fndecl (exp
);
9161 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9163 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9165 expand_builtin_trap ();
9169 ost
= CALL_EXPR_ARG (exp
, 1);
9172 if (TREE_CODE (ost
) != INTEGER_CST
9173 || tree_int_cst_sgn (ost
) < 0
9174 || compare_tree_int (ost
, 3) > 0)
9176 error ("%Klast argument of %D is not integer constant between 0 and 3",
9178 expand_builtin_trap ();
9182 object_size_type
= tree_to_shwi (ost
);
9184 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
9187 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9188 FCODE is the BUILT_IN_* to use.
9189 Return NULL_RTX if we failed; the caller should emit a normal call,
9190 otherwise try to get the result in TARGET, if convenient (and in
9191 mode MODE if that's convenient). */
9194 expand_builtin_memory_chk (tree exp
, rtx target
, machine_mode mode
,
9195 enum built_in_function fcode
)
9197 tree dest
, src
, len
, size
;
9199 if (!validate_arglist (exp
,
9201 fcode
== BUILT_IN_MEMSET_CHK
9202 ? INTEGER_TYPE
: POINTER_TYPE
,
9203 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
9206 dest
= CALL_EXPR_ARG (exp
, 0);
9207 src
= CALL_EXPR_ARG (exp
, 1);
9208 len
= CALL_EXPR_ARG (exp
, 2);
9209 size
= CALL_EXPR_ARG (exp
, 3);
9211 if (! tree_fits_uhwi_p (size
))
9214 if (tree_fits_uhwi_p (len
) || integer_all_onesp (size
))
9218 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
9220 warning_at (tree_nonartificial_location (exp
),
9221 0, "%Kcall to %D will always overflow destination buffer",
9222 exp
, get_callee_fndecl (exp
));
9227 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9228 mem{cpy,pcpy,move,set} is available. */
9231 case BUILT_IN_MEMCPY_CHK
:
9232 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
9234 case BUILT_IN_MEMPCPY_CHK
:
9235 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
9237 case BUILT_IN_MEMMOVE_CHK
:
9238 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
9240 case BUILT_IN_MEMSET_CHK
:
9241 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
9250 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 3, dest
, src
, len
);
9251 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
9252 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
9253 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
9255 else if (fcode
== BUILT_IN_MEMSET_CHK
)
9259 unsigned int dest_align
= get_pointer_alignment (dest
);
9261 /* If DEST is not a pointer type, call the normal function. */
9262 if (dest_align
== 0)
9265 /* If SRC and DEST are the same (and not volatile), do nothing. */
9266 if (operand_equal_p (src
, dest
, 0))
9270 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
9272 /* Evaluate and ignore LEN in case it has side-effects. */
9273 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
9274 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
9277 expr
= fold_build_pointer_plus (dest
, len
);
9278 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
9281 /* __memmove_chk special case. */
9282 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
9284 unsigned int src_align
= get_pointer_alignment (src
);
9289 /* If src is categorized for a readonly section we can use
9290 normal __memcpy_chk. */
9291 if (readonly_data_expr (src
))
9293 tree fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
9296 fn
= build_call_nofold_loc (EXPR_LOCATION (exp
), fn
, 4,
9297 dest
, src
, len
, size
);
9298 gcc_assert (TREE_CODE (fn
) == CALL_EXPR
);
9299 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
9300 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
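
/* Illustrative sketch (not from the GCC sources): with -D_FORTIFY_SOURCE the
   C library typically wraps memcpy and friends so that a call becomes e.g.
   __builtin___memcpy_chk (dst, src, len, __builtin_object_size (dst, 0));
   the expansion above turns that back into a plain memcpy when the length is
   provably within bounds, and warns when it provably is not.  */
#if 0
#include <string.h>

int
main (void)
{
  char buf[8];
  /* Known destination size (8) and known length (4): this expands to a
     plain memcpy with no runtime check.  */
  __builtin___memcpy_chk (buf, "abc", 4, __builtin_object_size (buf, 0));
  return buf[0];
}
#endif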
9307 /* Emit warning if a buffer overflow is detected at compile time. */
9310 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
9314 location_t loc
= tree_nonartificial_location (exp
);
9318 case BUILT_IN_STRCPY_CHK
:
9319 case BUILT_IN_STPCPY_CHK
:
9320 /* For __strcat_chk the warning will be emitted only if overflowing
9321 by at least strlen (dest) + 1 bytes. */
9322 case BUILT_IN_STRCAT_CHK
:
9323 len
= CALL_EXPR_ARG (exp
, 1);
9324 size
= CALL_EXPR_ARG (exp
, 2);
9327 case BUILT_IN_STRNCAT_CHK
:
9328 case BUILT_IN_STRNCPY_CHK
:
9329 case BUILT_IN_STPNCPY_CHK
:
9330 len
= CALL_EXPR_ARG (exp
, 2);
9331 size
= CALL_EXPR_ARG (exp
, 3);
9333 case BUILT_IN_SNPRINTF_CHK
:
9334 case BUILT_IN_VSNPRINTF_CHK
:
9335 len
= CALL_EXPR_ARG (exp
, 1);
9336 size
= CALL_EXPR_ARG (exp
, 3);
9345 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
9350 len
= c_strlen (len
, 1);
9351 if (! len
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
9354 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
9356 tree src
= CALL_EXPR_ARG (exp
, 1);
9357 if (! src
|| ! tree_fits_uhwi_p (len
) || tree_int_cst_lt (len
, size
))
9359 src
= c_strlen (src
, 1);
9360 if (! src
|| ! tree_fits_uhwi_p (src
))
9362 warning_at (loc
, 0, "%Kcall to %D might overflow destination buffer",
9363 exp
, get_callee_fndecl (exp
));
9366 else if (tree_int_cst_lt (src
, size
))
9369 else if (! tree_fits_uhwi_p (len
) || ! tree_int_cst_lt (size
, len
))
9372 warning_at (loc
, 0, "%Kcall to %D will always overflow destination buffer",
9373 exp
, get_callee_fndecl (exp
));
9376 /* Emit warning if a buffer overflow is detected at compile time
9377 in __sprintf_chk/__vsprintf_chk calls. */
9380 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
9382 tree size
, len
, fmt
;
9383 const char *fmt_str
;
9384 int nargs
= call_expr_nargs (exp
);
9386 /* Verify the required arguments in the original call. */
9390 size
= CALL_EXPR_ARG (exp
, 2);
9391 fmt
= CALL_EXPR_ARG (exp
, 3);
9393 if (! tree_fits_uhwi_p (size
) || integer_all_onesp (size
))
9396 /* Check whether the format is a literal string constant. */
9397 fmt_str
= c_getstr (fmt
);
9398 if (fmt_str
== NULL
)
9401 if (!init_target_chars ())
9404 /* If the format doesn't contain % args or %%, we know its size. */
9405 if (strchr (fmt_str
, target_percent
) == 0)
9406 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
9407 /* If the format is "%s" and first ... argument is a string literal,
9409 else if (fcode
== BUILT_IN_SPRINTF_CHK
9410 && strcmp (fmt_str
, target_percent_s
) == 0)
9416 arg
= CALL_EXPR_ARG (exp
, 4);
9417 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
9420 len
= c_strlen (arg
, 1);
9421 if (!len
|| ! tree_fits_uhwi_p (len
))
9427 if (! tree_int_cst_lt (len
, size
))
9428 warning_at (tree_nonartificial_location (exp
),
9429 0, "%Kcall to %D will always overflow destination buffer",
9430 exp
, get_callee_fndecl (exp
));
/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
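
/* Illustrative sketch (not from the GCC sources): the OST argument selects
   between maximum (types 0 and 1) and minimum (types 2 and 3) remaining
   object size, with the odd types measuring the closest enclosing subobject;
   when nothing is known, types 0/1 yield (size_t) -1 and types 2/3 yield 0.
   Typical results for a known object:  */
#if 0
#include <stdio.h>

struct s { char a[4]; char b[12]; };

int
main (void)
{
  struct s v;
  /* Whole enclosing object vs. closest enclosing subobject.  */
  printf ("%zu\n", __builtin_object_size (v.a, 0));   /* typically 16 */
  printf ("%zu\n", __builtin_object_size (v.a, 1));   /* typically 4  */
  return 0;
}
#endif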
9503 /* Builtins with folding operations that operate on "..." arguments
9504 need special handling; we need to store the arguments in a convenient
9505 data structure before attempting any folding. Fortunately there are
9506 only a few builtins that fall into this category. FNDECL is the
9507 function, EXP is the CALL_EXPR for the call. */
9510 fold_builtin_varargs (location_t loc
, tree fndecl
, tree
*args
, int nargs
)
9512 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9513 tree ret
= NULL_TREE
;
9517 case BUILT_IN_FPCLASSIFY
:
9518 ret
= fold_builtin_fpclassify (loc
, args
, nargs
);
9526 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
9527 SET_EXPR_LOCATION (ret
, loc
);
9528 TREE_NO_WARNING (ret
) = 1;
9534 /* Initialize format string characters in the target charset. */
9537 init_target_chars (void)
9542 target_newline
= lang_hooks
.to_target_charset ('\n');
9543 target_percent
= lang_hooks
.to_target_charset ('%');
9544 target_c
= lang_hooks
.to_target_charset ('c');
9545 target_s
= lang_hooks
.to_target_charset ('s');
9546 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
9550 target_percent_c
[0] = target_percent
;
9551 target_percent_c
[1] = target_c
;
9552 target_percent_c
[2] = '\0';
9554 target_percent_s
[0] = target_percent
;
9555 target_percent_s
[1] = target_s
;
9556 target_percent_s
[2] = '\0';
9558 target_percent_s_newline
[0] = target_percent
;
9559 target_percent_s_newline
[1] = target_s
;
9560 target_percent_s_newline
[2] = target_newline
;
9561 target_percent_s_newline
[3] = '\0';
9568 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9569 and no overflow/underflow occurred. INEXACT is true if M was not
9570 exactly calculated. TYPE is the tree type for the result. This
9571 function assumes that you cleared the MPFR flags and then
9572 calculated M to see if anything subsequently set a flag prior to
9573 entering this function. Return NULL_TREE if any checks fail. */
9576 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
9578 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9579 overflow/underflow occurred. If -frounding-math, proceed iff the
9580 result of calling FUNC was exact. */
9581 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9582 && (!flag_rounding_math
|| !inexact
))
9586 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
9587 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9588 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9589 but the mpft_t is not, then we underflowed in the
9591 if (real_isfinite (&rr
)
9592 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
9594 REAL_VALUE_TYPE rmode
;
9596 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
9597 /* Proceed iff the specified mode can hold the value. */
9598 if (real_identical (&rmode
, &rr
))
9599 return build_real (type
, rmode
);
9605 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9606 number and no overflow/underflow occurred. INEXACT is true if M
9607 was not exactly calculated. TYPE is the tree type for the result.
9608 This function assumes that you cleared the MPFR flags and then
9609 calculated M to see if anything subsequently set a flag prior to
9610 entering this function. Return NULL_TREE if any checks fail, if
9611 FORCE_CONVERT is true, then bypass the checks. */
9614 do_mpc_ckconv (mpc_srcptr m
, tree type
, int inexact
, int force_convert
)
9616 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9617 overflow/underflow occurred. If -frounding-math, proceed iff the
9618 result of calling FUNC was exact. */
9620 || (mpfr_number_p (mpc_realref (m
)) && mpfr_number_p (mpc_imagref (m
))
9621 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9622 && (!flag_rounding_math
|| !inexact
)))
9624 REAL_VALUE_TYPE re
, im
;
9626 real_from_mpfr (&re
, mpc_realref (m
), TREE_TYPE (type
), GMP_RNDN
);
9627 real_from_mpfr (&im
, mpc_imagref (m
), TREE_TYPE (type
), GMP_RNDN
);
9628 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9629 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9630 but the mpft_t is not, then we underflowed in the
9633 || (real_isfinite (&re
) && real_isfinite (&im
)
9634 && (re
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_realref (m
)) != 0)
9635 && (im
.cl
== rvc_zero
) == (mpfr_zero_p (mpc_imagref (m
)) != 0)))
9637 REAL_VALUE_TYPE re_mode
, im_mode
;
9639 real_convert (&re_mode
, TYPE_MODE (TREE_TYPE (type
)), &re
);
9640 real_convert (&im_mode
, TYPE_MODE (TREE_TYPE (type
)), &im
);
9641 /* Proceed iff the specified mode can hold the value. */
9643 || (real_identical (&re_mode
, &re
)
9644 && real_identical (&im_mode
, &im
)))
9645 return build_complex (type
, build_real (TREE_TYPE (type
), re_mode
),
9646 build_real (TREE_TYPE (type
), im_mode
));
9652 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9653 the pointer *(ARG_QUO) and return the result. The type is taken
9654 from the type of ARG0 and is used for setting the precision of the
9655 calculation and results. */
9658 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
9660 tree
const type
= TREE_TYPE (arg0
);
9661 tree result
= NULL_TREE
;
9666 /* To proceed, MPFR must exactly represent the target floating point
9667 format, which only happens when the target base equals two. */
9668 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
9669 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9670 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
9672 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
9673 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
9675 if (real_isfinite (ra0
) && real_isfinite (ra1
))
9677 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
9678 const int prec
= fmt
->p
;
9679 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
9684 mpfr_inits2 (prec
, m0
, m1
, NULL
);
9685 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
9686 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
9687 mpfr_clear_flags ();
9688 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, rnd
);
9689 /* Remquo is independent of the rounding mode, so pass
9690 inexact=0 to do_mpfr_ckconv(). */
9691 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
9692 mpfr_clears (m0
, m1
, NULL
);
9695 /* MPFR calculates quo in the host's long so it may
9696 return more bits in quo than the target int can hold
9697 if sizeof(host long) > sizeof(target int). This can
9698 happen even for native compilers in LP64 mode. In
9699 these cases, modulo the quo value with the largest
9700 number that the target int can hold while leaving one
9701 bit for the sign. */
9702 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
9703 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
9705 /* Dereference the quo pointer argument. */
9706 arg_quo
= build_fold_indirect_ref (arg_quo
);
9707 /* Proceed iff a valid pointer type was passed in. */
9708 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
9710 /* Set the value. */
9712 = fold_build2 (MODIFY_EXPR
, TREE_TYPE (arg_quo
), arg_quo
,
9713 build_int_cst (TREE_TYPE (arg_quo
),
9715 TREE_SIDE_EFFECTS (result_quo
) = 1;
9716 /* Combine the quo assignment with the rem. */
9717 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
9718 result_quo
, result_rem
));
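
/* Illustrative sketch (not from the GCC sources): the constant folding above
   mirrors the C99 remquo semantics, where the return value is the IEEE
   remainder and the int pointed to by the third argument receives (at least
   the low bits of) the rounded quotient; that is why the host 'long' result
   is reduced to the target int range above.  */
#if 0
#include <math.h>
#include <stdio.h>

int
main (void)
{
  int quo;
  double rem = remquo (29.0, 3.0, &quo);
  /* 29 / 3 rounds to 10, so rem = 29 - 10*3 = -1 and quo holds 10.  */
  printf ("rem=%g quo=%d\n", rem, quo);
  return 0;
}
#endif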
9726 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9727 resulting value as a tree with type TYPE. The mpfr precision is
9728 set to the precision of TYPE. We assume that this mpfr function
9729 returns zero if the result could be calculated exactly within the
9730 requested precision. In addition, the integer pointer represented
9731 by ARG_SG will be dereferenced and set to the appropriate signgam
9735 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
9737 tree result
= NULL_TREE
;
9741 /* To proceed, MPFR must exactly represent the target floating point
9742 format, which only happens when the target base equals two. Also
9743 verify ARG is a constant and that ARG_SG is an int pointer. */
9744 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
9745 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
9746 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
9747 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
9749 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
9751 /* In addition to NaN and Inf, the argument cannot be zero or a
9752 negative integer. */
9753 if (real_isfinite (ra
)
9754 && ra
->cl
!= rvc_zero
9755 && !(real_isneg (ra
) && real_isinteger (ra
, TYPE_MODE (type
))))
9757 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
9758 const int prec
= fmt
->p
;
9759 const mp_rnd_t rnd
= fmt
->round_towards_zero
? GMP_RNDZ
: GMP_RNDN
;
9764 mpfr_init2 (m
, prec
);
9765 mpfr_from_real (m
, ra
, GMP_RNDN
);
9766 mpfr_clear_flags ();
9767 inexact
= mpfr_lgamma (m
, &sg
, m
, rnd
);
9768 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
9774 /* Dereference the arg_sg pointer argument. */
9775 arg_sg
= build_fold_indirect_ref (arg_sg
);
9776 /* Assign the signgam value into *arg_sg. */
9777 result_sg
= fold_build2 (MODIFY_EXPR
,
9778 TREE_TYPE (arg_sg
), arg_sg
,
9779 build_int_cst (TREE_TYPE (arg_sg
), sg
));
9780 TREE_SIDE_EFFECTS (result_sg
) = 1;
9781 /* Combine the signgam assignment with the lgamma result. */
9782 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
9783 result_sg
, result_lg
));
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

static tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
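
/* Illustrative note (added commentary, not part of the original GCC
   sources): do_mpc_arg2 is a generic worker, so a caller simply passes
   the desired mpc entry point.  A hypothetical invocation folding a
   constant complex power would look like

     tree folded = do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);

   where the fourth argument (do_nonfinite) is 0 so that Inf/NaN inputs
   are left alone.  mpc_pow has the required int (*)(mpc_ptr,
   mpc_srcptr, mpc_srcptr, mpc_rnd_t) signature; the argument checks
   performed by real callers are omitted from this sketch.  */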
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
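
/* Illustrative note (added commentary, not part of the original GCC
   sources): a GIMPLE pass holding a builtin call statement would use
   this wrapper roughly as follows (hypothetical caller):

     gcall *call = as_a <gcall *> (gsi_stmt (gsi));
     bool ignore = gimple_call_lhs (call) == NULL_TREE;
     tree folded = fold_call_stmt (call, ignore);

   A NULL_TREE result means the call could not be folded; a non-NULL
   result is the replacement expression, already carrying the call's
   location when one was available.  */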
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
                         "ffs");
    }
}
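
/* Illustrative note (added commentary, not part of the original GCC
   sources): this hook is what makes a user-renamed entry point stick
   to the builtin.  Given a declaration along the lines of

     extern int ffs (int) __asm__ ("__my_ffs");

   the compiler ends up calling

     set_builtin_user_assembler_name (ffs_decl, "__my_ffs");

   (ffs_decl being a stand-in name for the FUNCTION_DECL involved), so
   that both the explicit builtin declaration and, on targets where int
   is narrower than a word, the ffs optab libcall emit "__my_ffs"
   instead of "ffs".  */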
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
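
/* Illustrative note (added commentary, not part of the original GCC
   sources): size/cost estimators are the expected users of this
   predicate; a hypothetical consumer would write something like

     tree fndecl = gimple_call_fndecl (stmt);
     if (fndecl && is_simple_builtin (fndecl))
       cost = 0;

   treating such calls as free because they fold to a constant or to a
   simple register move.  The weight of 0 is only an example; real
   estimators choose their own values.  */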
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);