1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "tree-gimple.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
42 #include "typeclass.h"
47 #include "langhooks.h"
48 #include "basic-block.h"
49 #include "tree-mudflap.h"
50 #include "tree-flow.h"
51 #include "value-prof.h"
52 #include "diagnostic.h"
54 #ifndef PAD_VARARGS_DOWN
55 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
58 /* Define the names of the builtin function types and codes. */
59 const char *const built_in_class_names
[4]
60 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
62 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
63 const char * built_in_names
[(int) END_BUILTINS
] =
65 #include "builtins.def"
69 /* Setup an array of _DECL trees, make sure each element is
70 initialized to NULL_TREE. */
71 tree built_in_decls
[(int) END_BUILTINS
];
72 /* Declarations used when constructing the builtin implicitly in the compiler.
73 It may be NULL_TREE when this is invalid (for instance runtime is not
74 required to implement the function call in all cases). */
75 tree implicit_built_in_decls
[(int) END_BUILTINS
];
77 static const char *c_getstr (tree
);
78 static rtx
c_readstr (const char *, enum machine_mode
);
79 static int target_char_cast (tree
, char *);
80 static rtx
get_memory_rtx (tree
, tree
);
81 static int apply_args_size (void);
82 static int apply_result_size (void);
83 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
84 static rtx
result_vector (int, rtx
);
86 static void expand_builtin_update_setjmp_buf (rtx
);
87 static void expand_builtin_prefetch (tree
);
88 static rtx
expand_builtin_apply_args (void);
89 static rtx
expand_builtin_apply_args_1 (void);
90 static rtx
expand_builtin_apply (rtx
, rtx
, rtx
);
91 static void expand_builtin_return (rtx
);
92 static enum type_class
type_to_class (tree
);
93 static rtx
expand_builtin_classify_type (tree
);
94 static void expand_errno_check (tree
, rtx
);
95 static rtx
expand_builtin_mathfn (tree
, rtx
, rtx
);
96 static rtx
expand_builtin_mathfn_2 (tree
, rtx
, rtx
);
97 static rtx
expand_builtin_mathfn_3 (tree
, rtx
, rtx
);
98 static rtx
expand_builtin_interclass_mathfn (tree
, rtx
, rtx
);
99 static rtx
expand_builtin_sincos (tree
);
100 static rtx
expand_builtin_cexpi (tree
, rtx
, rtx
);
101 static rtx
expand_builtin_int_roundingfn (tree
, rtx
, rtx
);
102 static rtx
expand_builtin_int_roundingfn_2 (tree
, rtx
, rtx
);
103 static rtx
expand_builtin_args_info (tree
);
104 static rtx
expand_builtin_next_arg (void);
105 static rtx
expand_builtin_va_start (tree
);
106 static rtx
expand_builtin_va_end (tree
);
107 static rtx
expand_builtin_va_copy (tree
);
108 static rtx
expand_builtin_memchr (tree
, rtx
, enum machine_mode
);
109 static rtx
expand_builtin_memcmp (tree
, rtx
, enum machine_mode
);
110 static rtx
expand_builtin_strcmp (tree
, rtx
, enum machine_mode
);
111 static rtx
expand_builtin_strncmp (tree
, rtx
, enum machine_mode
);
112 static rtx
builtin_memcpy_read_str (void *, HOST_WIDE_INT
, enum machine_mode
);
113 static rtx
expand_builtin_strcat (tree
, tree
, rtx
, enum machine_mode
);
114 static rtx
expand_builtin_strncat (tree
, rtx
, enum machine_mode
);
115 static rtx
expand_builtin_strspn (tree
, rtx
, enum machine_mode
);
116 static rtx
expand_builtin_strcspn (tree
, rtx
, enum machine_mode
);
117 static rtx
expand_builtin_memcpy (tree
, rtx
, enum machine_mode
);
118 static rtx
expand_builtin_mempcpy (tree
, rtx
, enum machine_mode
);
119 static rtx
expand_builtin_mempcpy_args (tree
, tree
, tree
, tree
, rtx
,
120 enum machine_mode
, int);
121 static rtx
expand_builtin_memmove (tree
, rtx
, enum machine_mode
, int);
122 static rtx
expand_builtin_memmove_args (tree
, tree
, tree
, tree
, rtx
,
123 enum machine_mode
, int);
124 static rtx
expand_builtin_bcopy (tree
, int);
125 static rtx
expand_builtin_strcpy (tree
, tree
, rtx
, enum machine_mode
);
126 static rtx
expand_builtin_strcpy_args (tree
, tree
, tree
, rtx
, enum machine_mode
);
127 static rtx
expand_builtin_stpcpy (tree
, rtx
, enum machine_mode
);
128 static rtx
expand_builtin_strncpy (tree
, rtx
, enum machine_mode
);
129 static rtx
builtin_memset_gen_str (void *, HOST_WIDE_INT
, enum machine_mode
);
130 static rtx
expand_builtin_memset (tree
, rtx
, enum machine_mode
);
131 static rtx
expand_builtin_memset_args (tree
, tree
, tree
, rtx
, enum machine_mode
, tree
);
132 static rtx
expand_builtin_bzero (tree
);
133 static rtx
expand_builtin_strlen (tree
, rtx
, enum machine_mode
);
134 static rtx
expand_builtin_strstr (tree
, rtx
, enum machine_mode
);
135 static rtx
expand_builtin_strpbrk (tree
, rtx
, enum machine_mode
);
136 static rtx
expand_builtin_strchr (tree
, rtx
, enum machine_mode
);
137 static rtx
expand_builtin_strrchr (tree
, rtx
, enum machine_mode
);
138 static rtx
expand_builtin_alloca (tree
, rtx
);
139 static rtx
expand_builtin_unop (enum machine_mode
, tree
, rtx
, rtx
, optab
);
140 static rtx
expand_builtin_frame_address (tree
, tree
);
141 static rtx
expand_builtin_fputs (tree
, rtx
, bool);
142 static rtx
expand_builtin_printf (tree
, rtx
, enum machine_mode
, bool);
143 static rtx
expand_builtin_fprintf (tree
, rtx
, enum machine_mode
, bool);
144 static rtx
expand_builtin_sprintf (tree
, rtx
, enum machine_mode
);
145 static tree
stabilize_va_list (tree
, int);
146 static rtx
expand_builtin_expect (tree
, rtx
);
147 static tree
fold_builtin_constant_p (tree
);
148 static tree
fold_builtin_expect (tree
, tree
);
149 static tree
fold_builtin_classify_type (tree
);
150 static tree
fold_builtin_strlen (tree
);
151 static tree
fold_builtin_inf (tree
, int);
152 static tree
fold_builtin_nan (tree
, tree
, int);
153 static tree
rewrite_call_expr (tree
, int, tree
, int, ...);
154 static bool validate_arg (const_tree
, enum tree_code code
);
155 static bool integer_valued_real_p (tree
);
156 static tree
fold_trunc_transparent_mathfn (tree
, tree
);
157 static bool readonly_data_expr (tree
);
158 static rtx
expand_builtin_fabs (tree
, rtx
, rtx
);
159 static rtx
expand_builtin_signbit (tree
, rtx
);
160 static tree
fold_builtin_sqrt (tree
, tree
);
161 static tree
fold_builtin_cbrt (tree
, tree
);
162 static tree
fold_builtin_pow (tree
, tree
, tree
, tree
);
163 static tree
fold_builtin_powi (tree
, tree
, tree
, tree
);
164 static tree
fold_builtin_cos (tree
, tree
, tree
);
165 static tree
fold_builtin_cosh (tree
, tree
, tree
);
166 static tree
fold_builtin_tan (tree
, tree
);
167 static tree
fold_builtin_trunc (tree
, tree
);
168 static tree
fold_builtin_floor (tree
, tree
);
169 static tree
fold_builtin_ceil (tree
, tree
);
170 static tree
fold_builtin_round (tree
, tree
);
171 static tree
fold_builtin_int_roundingfn (tree
, tree
);
172 static tree
fold_builtin_bitop (tree
, tree
);
173 static tree
fold_builtin_memory_op (tree
, tree
, tree
, tree
, bool, int);
174 static tree
fold_builtin_strchr (tree
, tree
, tree
);
175 static tree
fold_builtin_memchr (tree
, tree
, tree
, tree
);
176 static tree
fold_builtin_memcmp (tree
, tree
, tree
);
177 static tree
fold_builtin_strcmp (tree
, tree
);
178 static tree
fold_builtin_strncmp (tree
, tree
, tree
);
179 static tree
fold_builtin_signbit (tree
, tree
);
180 static tree
fold_builtin_copysign (tree
, tree
, tree
, tree
);
181 static tree
fold_builtin_isascii (tree
);
182 static tree
fold_builtin_toascii (tree
);
183 static tree
fold_builtin_isdigit (tree
);
184 static tree
fold_builtin_fabs (tree
, tree
);
185 static tree
fold_builtin_abs (tree
, tree
);
186 static tree
fold_builtin_unordered_cmp (tree
, tree
, tree
, enum tree_code
,
188 static tree
fold_builtin_n (tree
, tree
*, int, bool);
189 static tree
fold_builtin_0 (tree
, bool);
190 static tree
fold_builtin_1 (tree
, tree
, bool);
191 static tree
fold_builtin_2 (tree
, tree
, tree
, bool);
192 static tree
fold_builtin_3 (tree
, tree
, tree
, tree
, bool);
193 static tree
fold_builtin_4 (tree
, tree
, tree
, tree
, tree
, bool);
194 static tree
fold_builtin_varargs (tree
, tree
, bool);
196 static tree
fold_builtin_strpbrk (tree
, tree
, tree
);
197 static tree
fold_builtin_strstr (tree
, tree
, tree
);
198 static tree
fold_builtin_strrchr (tree
, tree
, tree
);
199 static tree
fold_builtin_strcat (tree
, tree
);
200 static tree
fold_builtin_strncat (tree
, tree
, tree
);
201 static tree
fold_builtin_strspn (tree
, tree
);
202 static tree
fold_builtin_strcspn (tree
, tree
);
203 static tree
fold_builtin_sprintf (tree
, tree
, tree
, int);
205 static rtx
expand_builtin_object_size (tree
);
206 static rtx
expand_builtin_memory_chk (tree
, rtx
, enum machine_mode
,
207 enum built_in_function
);
208 static void maybe_emit_chk_warning (tree
, enum built_in_function
);
209 static void maybe_emit_sprintf_chk_warning (tree
, enum built_in_function
);
210 static tree
fold_builtin_object_size (tree
, tree
);
211 static tree
fold_builtin_strcat_chk (tree
, tree
, tree
, tree
);
212 static tree
fold_builtin_strncat_chk (tree
, tree
, tree
, tree
, tree
);
213 static tree
fold_builtin_sprintf_chk (tree
, enum built_in_function
);
214 static tree
fold_builtin_printf (tree
, tree
, tree
, bool, enum built_in_function
);
215 static tree
fold_builtin_fprintf (tree
, tree
, tree
, tree
, bool,
216 enum built_in_function
);
217 static bool init_target_chars (void);
219 static unsigned HOST_WIDE_INT target_newline
;
220 static unsigned HOST_WIDE_INT target_percent
;
221 static unsigned HOST_WIDE_INT target_c
;
222 static unsigned HOST_WIDE_INT target_s
;
223 static char target_percent_c
[3];
224 static char target_percent_s
[3];
225 static char target_percent_s_newline
[4];
226 static tree
do_mpfr_arg1 (tree
, tree
, int (*)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
227 const REAL_VALUE_TYPE
*, const REAL_VALUE_TYPE
*, bool);
228 static tree
do_mpfr_arg2 (tree
, tree
, tree
,
229 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
230 static tree
do_mpfr_arg3 (tree
, tree
, tree
, tree
,
231 int (*)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
));
232 static tree
do_mpfr_sincos (tree
, tree
, tree
);
233 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
234 static tree
do_mpfr_bessel_n (tree
, tree
, tree
,
235 int (*)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
236 const REAL_VALUE_TYPE
*, bool);
237 static tree
do_mpfr_remquo (tree
, tree
, tree
);
238 static tree
do_mpfr_lgamma_r (tree
, tree
, tree
);
241 /* Return true if NODE should be considered for inline expansion regardless
242 of the optimization level. This means whenever a function is invoked with
243 its "internal" name, which normally contains the prefix "__builtin". */
245 static bool called_as_built_in (tree node
)
247 const char *name
= IDENTIFIER_POINTER (DECL_NAME (node
));
248 if (strncmp (name
, "__builtin_", 10) == 0)
250 if (strncmp (name
, "__sync_", 7) == 0)
255 /* Return the alignment in bits of EXP, a pointer valued expression.
256 But don't return more than MAX_ALIGN no matter what.
257 The alignment returned is, by default, the alignment of the thing that
258 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
260 Otherwise, look at the expression to see if we can do better, i.e., if the
261 expression is actually pointing at an object whose alignment is tighter. */
264 get_pointer_alignment (tree exp
, unsigned int max_align
)
266 unsigned int align
, inner
;
268 /* We rely on TER to compute accurate alignment information. */
269 if (!(optimize
&& flag_tree_ter
))
272 if (!POINTER_TYPE_P (TREE_TYPE (exp
)))
275 align
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
276 align
= MIN (align
, max_align
);
280 switch (TREE_CODE (exp
))
284 case NON_LVALUE_EXPR
:
285 exp
= TREE_OPERAND (exp
, 0);
286 if (! POINTER_TYPE_P (TREE_TYPE (exp
)))
289 inner
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
290 align
= MIN (inner
, max_align
);
293 case POINTER_PLUS_EXPR
:
294 /* If sum of pointer + int, restrict our maximum alignment to that
295 imposed by the integer. If not, we can't do any better than
297 if (! host_integerp (TREE_OPERAND (exp
, 1), 1))
300 while (((tree_low_cst (TREE_OPERAND (exp
, 1), 1))
301 & (max_align
/ BITS_PER_UNIT
- 1))
305 exp
= TREE_OPERAND (exp
, 0);
309 /* See what we are pointing at and look at its alignment. */
310 exp
= TREE_OPERAND (exp
, 0);
312 if (handled_component_p (exp
))
314 HOST_WIDE_INT bitsize
, bitpos
;
316 enum machine_mode mode
;
317 int unsignedp
, volatilep
;
319 exp
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
320 &mode
, &unsignedp
, &volatilep
, true);
322 inner
= MIN (inner
, (unsigned) (bitpos
& -bitpos
));
323 if (offset
&& TREE_CODE (offset
) == PLUS_EXPR
324 && host_integerp (TREE_OPERAND (offset
, 1), 1))
326 /* Any overflow in calculating offset_bits won't change
329 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
333 inner
= MIN (inner
, (offset_bits
& -offset_bits
));
334 offset
= TREE_OPERAND (offset
, 0);
336 if (offset
&& TREE_CODE (offset
) == MULT_EXPR
337 && host_integerp (TREE_OPERAND (offset
, 1), 1))
339 /* Any overflow in calculating offset_factor won't change
341 unsigned offset_factor
342 = ((unsigned) tree_low_cst (TREE_OPERAND (offset
, 1), 1)
346 inner
= MIN (inner
, (offset_factor
& -offset_factor
));
349 inner
= MIN (inner
, BITS_PER_UNIT
);
352 align
= MIN (inner
, DECL_ALIGN (exp
));
353 #ifdef CONSTANT_ALIGNMENT
354 else if (CONSTANT_CLASS_P (exp
))
355 align
= MIN (inner
, (unsigned)CONSTANT_ALIGNMENT (exp
, align
));
357 else if (TREE_CODE (exp
) == VIEW_CONVERT_EXPR
358 || TREE_CODE (exp
) == INDIRECT_REF
)
359 align
= MIN (TYPE_ALIGN (TREE_TYPE (exp
)), inner
);
361 align
= MIN (align
, inner
);
362 return MIN (align
, max_align
);
370 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
371 way, because it could contain a zero byte in the middle.
372 TREE_STRING_LENGTH is the size of the character array, not the string.
374 ONLY_VALUE should be nonzero if the result is not going to be emitted
375 into the instruction stream and zero if it is going to be expanded.
376 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
377 is returned, otherwise NULL, since
378 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
379 evaluate the side-effects.
381 The value returned is of type `ssizetype'.
383 Unfortunately, string_constant can't access the values of const char
384 arrays with initializers, so neither can we do so here. */
387 c_strlen (tree src
, int only_value
)
390 HOST_WIDE_INT offset
;
395 if (TREE_CODE (src
) == COND_EXPR
396 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
400 len1
= c_strlen (TREE_OPERAND (src
, 1), only_value
);
401 len2
= c_strlen (TREE_OPERAND (src
, 2), only_value
);
402 if (tree_int_cst_equal (len1
, len2
))
406 if (TREE_CODE (src
) == COMPOUND_EXPR
407 && (only_value
|| !TREE_SIDE_EFFECTS (TREE_OPERAND (src
, 0))))
408 return c_strlen (TREE_OPERAND (src
, 1), only_value
);
410 src
= string_constant (src
, &offset_node
);
414 max
= TREE_STRING_LENGTH (src
) - 1;
415 ptr
= TREE_STRING_POINTER (src
);
417 if (offset_node
&& TREE_CODE (offset_node
) != INTEGER_CST
)
419 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
420 compute the offset to the following null if we don't know where to
421 start searching for it. */
424 for (i
= 0; i
< max
; i
++)
428 /* We don't know the starting offset, but we do know that the string
429 has no internal zero bytes. We can assume that the offset falls
430 within the bounds of the string; otherwise, the programmer deserves
431 what he gets. Subtract the offset from the length of the string,
432 and return that. This would perhaps not be valid if we were dealing
433 with named arrays in addition to literal string constants. */
435 return size_diffop (size_int (max
), offset_node
);
438 /* We have a known offset into the string. Start searching there for
439 a null character if we can represent it as a single HOST_WIDE_INT. */
440 if (offset_node
== 0)
442 else if (! host_integerp (offset_node
, 0))
445 offset
= tree_low_cst (offset_node
, 0);
447 /* If the offset is known to be out of bounds, warn, and call strlen at
449 if (offset
< 0 || offset
> max
)
451 warning (0, "offset outside bounds of constant string");
455 /* Use strlen to search for the first zero byte. Since any strings
456 constructed with build_string will have nulls appended, we win even
457 if we get handed something like (char[4])"abcd".
459 Since OFFSET is our starting index into the string, no further
460 calculation is needed. */
461 return ssize_int (strlen (ptr
+ offset
));
464 /* Return a char pointer for a C string if it is a string constant
465 or sum of string constant and integer constant. */
472 src
= string_constant (src
, &offset_node
);
476 if (offset_node
== 0)
477 return TREE_STRING_POINTER (src
);
478 else if (!host_integerp (offset_node
, 1)
479 || compare_tree_int (offset_node
, TREE_STRING_LENGTH (src
) - 1) > 0)
482 return TREE_STRING_POINTER (src
) + tree_low_cst (offset_node
, 1);
485 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
486 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
489 c_readstr (const char *str
, enum machine_mode mode
)
495 gcc_assert (GET_MODE_CLASS (mode
) == MODE_INT
);
500 for (i
= 0; i
< GET_MODE_SIZE (mode
); i
++)
503 if (WORDS_BIG_ENDIAN
)
504 j
= GET_MODE_SIZE (mode
) - i
- 1;
505 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
506 && GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
507 j
= j
+ UNITS_PER_WORD
- 2 * (j
% UNITS_PER_WORD
) - 1;
509 gcc_assert (j
<= 2 * HOST_BITS_PER_WIDE_INT
);
512 ch
= (unsigned char) str
[i
];
513 c
[j
/ HOST_BITS_PER_WIDE_INT
] |= ch
<< (j
% HOST_BITS_PER_WIDE_INT
);
515 return immed_double_const (c
[0], c
[1], mode
);
518 /* Cast a target constant CST to target CHAR and if that value fits into
519 host char type, return zero and put that value into variable pointed to by
523 target_char_cast (tree cst
, char *p
)
525 unsigned HOST_WIDE_INT val
, hostval
;
527 if (!host_integerp (cst
, 1)
528 || CHAR_TYPE_SIZE
> HOST_BITS_PER_WIDE_INT
)
531 val
= tree_low_cst (cst
, 1);
532 if (CHAR_TYPE_SIZE
< HOST_BITS_PER_WIDE_INT
)
533 val
&= (((unsigned HOST_WIDE_INT
) 1) << CHAR_TYPE_SIZE
) - 1;
536 if (HOST_BITS_PER_CHAR
< HOST_BITS_PER_WIDE_INT
)
537 hostval
&= (((unsigned HOST_WIDE_INT
) 1) << HOST_BITS_PER_CHAR
) - 1;
546 /* Similar to save_expr, but assumes that arbitrary code is not executed
547 in between the multiple evaluations. In particular, we assume that a
548 non-addressable local variable will not be modified. */
551 builtin_save_expr (tree exp
)
553 if (TREE_ADDRESSABLE (exp
) == 0
554 && (TREE_CODE (exp
) == PARM_DECL
555 || (TREE_CODE (exp
) == VAR_DECL
&& !TREE_STATIC (exp
))))
558 return save_expr (exp
);
561 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
562 times to get the address of either a higher stack frame, or a return
563 address located within it (depending on FNDECL_CODE). */
566 expand_builtin_return_addr (enum built_in_function fndecl_code
, int count
)
570 #ifdef INITIAL_FRAME_ADDRESS_RTX
571 rtx tem
= INITIAL_FRAME_ADDRESS_RTX
;
575 /* For a zero count with __builtin_return_address, we don't care what
576 frame address we return, because target-specific definitions will
577 override us. Therefore frame pointer elimination is OK, and using
578 the soft frame pointer is OK.
580 For a nonzero count, or a zero count with __builtin_frame_address,
581 we require a stable offset from the current frame pointer to the
582 previous one, so we must use the hard frame pointer, and
583 we must disable frame pointer elimination. */
584 if (count
== 0 && fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
585 tem
= frame_pointer_rtx
;
588 tem
= hard_frame_pointer_rtx
;
590 /* Tell reload not to eliminate the frame pointer. */
591 current_function_accesses_prior_frames
= 1;
595 /* Some machines need special handling before we can access
596 arbitrary frames. For example, on the SPARC, we must first flush
597 all register windows to the stack. */
598 #ifdef SETUP_FRAME_ADDRESSES
600 SETUP_FRAME_ADDRESSES ();
603 /* On the SPARC, the return address is not in the frame, it is in a
604 register. There is no way to access it off of the current frame
605 pointer, but it can be accessed off the previous frame pointer by
606 reading the value from the register window save area. */
607 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
608 if (fndecl_code
== BUILT_IN_RETURN_ADDRESS
)
612 /* Scan back COUNT frames to the specified frame. */
613 for (i
= 0; i
< count
; i
++)
615 /* Assume the dynamic chain pointer is in the word that the
616 frame address points to, unless otherwise specified. */
617 #ifdef DYNAMIC_CHAIN_ADDRESS
618 tem
= DYNAMIC_CHAIN_ADDRESS (tem
);
620 tem
= memory_address (Pmode
, tem
);
621 tem
= gen_frame_mem (Pmode
, tem
);
622 tem
= copy_to_reg (tem
);
625 /* For __builtin_frame_address, return what we've got. But, on
626 the SPARC for example, we may have to add a bias. */
627 if (fndecl_code
== BUILT_IN_FRAME_ADDRESS
)
628 #ifdef FRAME_ADDR_RTX
629 return FRAME_ADDR_RTX (tem
);
634 /* For __builtin_return_address, get the return address from that frame. */
635 #ifdef RETURN_ADDR_RTX
636 tem
= RETURN_ADDR_RTX (count
, tem
);
638 tem
= memory_address (Pmode
,
639 plus_constant (tem
, GET_MODE_SIZE (Pmode
)));
640 tem
= gen_frame_mem (Pmode
, tem
);
645 /* Alias set used for setjmp buffer. */
646 static alias_set_type setjmp_alias_set
= -1;
648 /* Construct the leading half of a __builtin_setjmp call. Control will
649 return to RECEIVER_LABEL. This is also called directly by the SJLJ
650 exception handling code. */
653 expand_builtin_setjmp_setup (rtx buf_addr
, rtx receiver_label
)
655 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
659 if (setjmp_alias_set
== -1)
660 setjmp_alias_set
= new_alias_set ();
662 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
664 buf_addr
= force_reg (Pmode
, force_operand (buf_addr
, NULL_RTX
));
666 /* We store the frame pointer and the address of receiver_label in
667 the buffer and use the rest of it for the stack save area, which
668 is machine-dependent. */
670 mem
= gen_rtx_MEM (Pmode
, buf_addr
);
671 set_mem_alias_set (mem
, setjmp_alias_set
);
672 emit_move_insn (mem
, targetm
.builtin_setjmp_frame_value ());
674 mem
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
, GET_MODE_SIZE (Pmode
))),
675 set_mem_alias_set (mem
, setjmp_alias_set
);
677 emit_move_insn (validize_mem (mem
),
678 force_reg (Pmode
, gen_rtx_LABEL_REF (Pmode
, receiver_label
)));
680 stack_save
= gen_rtx_MEM (sa_mode
,
681 plus_constant (buf_addr
,
682 2 * GET_MODE_SIZE (Pmode
)));
683 set_mem_alias_set (stack_save
, setjmp_alias_set
);
684 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
686 /* If there is further processing to do, do it. */
687 #ifdef HAVE_builtin_setjmp_setup
688 if (HAVE_builtin_setjmp_setup
)
689 emit_insn (gen_builtin_setjmp_setup (buf_addr
));
692 /* Tell optimize_save_area_alloca that extra work is going to
693 need to go on during alloca. */
694 current_function_calls_setjmp
= 1;
696 /* We have a nonlocal label. */
697 current_function_has_nonlocal_label
= 1;
700 /* Construct the trailing part of a __builtin_setjmp call. This is
701 also called directly by the SJLJ exception handling code. */
704 expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED
)
706 /* Clobber the FP when we get here, so we have to make sure it's
707 marked as used by this function. */
708 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
710 /* Mark the static chain as clobbered here so life information
711 doesn't get messed up for it. */
712 emit_insn (gen_rtx_CLOBBER (VOIDmode
, static_chain_rtx
));
714 /* Now put in the code to restore the frame pointer, and argument
715 pointer, if needed. */
716 #ifdef HAVE_nonlocal_goto
717 if (! HAVE_nonlocal_goto
)
720 emit_move_insn (virtual_stack_vars_rtx
, hard_frame_pointer_rtx
);
721 /* This might change the hard frame pointer in ways that aren't
722 apparent to early optimization passes, so force a clobber. */
723 emit_insn (gen_rtx_CLOBBER (VOIDmode
, hard_frame_pointer_rtx
));
726 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
727 if (fixed_regs
[ARG_POINTER_REGNUM
])
729 #ifdef ELIMINABLE_REGS
731 static const struct elims
{const int from
, to
;} elim_regs
[] = ELIMINABLE_REGS
;
733 for (i
= 0; i
< ARRAY_SIZE (elim_regs
); i
++)
734 if (elim_regs
[i
].from
== ARG_POINTER_REGNUM
735 && elim_regs
[i
].to
== HARD_FRAME_POINTER_REGNUM
)
738 if (i
== ARRAY_SIZE (elim_regs
))
741 /* Now restore our arg pointer from the address at which it
742 was saved in our stack frame. */
743 emit_move_insn (virtual_incoming_args_rtx
,
744 copy_to_reg (get_arg_pointer_save_area (cfun
)));
749 #ifdef HAVE_builtin_setjmp_receiver
750 if (HAVE_builtin_setjmp_receiver
)
751 emit_insn (gen_builtin_setjmp_receiver (receiver_label
));
754 #ifdef HAVE_nonlocal_goto_receiver
755 if (HAVE_nonlocal_goto_receiver
)
756 emit_insn (gen_nonlocal_goto_receiver ());
761 /* We must not allow the code we just generated to be reordered by
762 scheduling. Specifically, the update of the frame pointer must
763 happen immediately, not later. */
764 emit_insn (gen_blockage ());
767 /* __builtin_longjmp is passed a pointer to an array of five words (not
768 all will be used on all machines). It operates similarly to the C
769 library function of the same name, but is more efficient. Much of
770 the code below is copied from the handling of non-local gotos. */
773 expand_builtin_longjmp (rtx buf_addr
, rtx value
)
775 rtx fp
, lab
, stack
, insn
, last
;
776 enum machine_mode sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
778 if (setjmp_alias_set
== -1)
779 setjmp_alias_set
= new_alias_set ();
781 buf_addr
= convert_memory_address (Pmode
, buf_addr
);
783 buf_addr
= force_reg (Pmode
, buf_addr
);
785 /* We used to store value in static_chain_rtx, but that fails if pointers
786 are smaller than integers. We instead require that the user must pass
787 a second argument of 1, because that is what builtin_setjmp will
788 return. This also makes EH slightly more efficient, since we are no
789 longer copying around a value that we don't care about. */
790 gcc_assert (value
== const1_rtx
);
792 last
= get_last_insn ();
793 #ifdef HAVE_builtin_longjmp
794 if (HAVE_builtin_longjmp
)
795 emit_insn (gen_builtin_longjmp (buf_addr
));
799 fp
= gen_rtx_MEM (Pmode
, buf_addr
);
800 lab
= gen_rtx_MEM (Pmode
, plus_constant (buf_addr
,
801 GET_MODE_SIZE (Pmode
)));
803 stack
= gen_rtx_MEM (sa_mode
, plus_constant (buf_addr
,
804 2 * GET_MODE_SIZE (Pmode
)));
805 set_mem_alias_set (fp
, setjmp_alias_set
);
806 set_mem_alias_set (lab
, setjmp_alias_set
);
807 set_mem_alias_set (stack
, setjmp_alias_set
);
809 /* Pick up FP, label, and SP from the block and jump. This code is
810 from expand_goto in stmt.c; see there for detailed comments. */
811 #ifdef HAVE_nonlocal_goto
812 if (HAVE_nonlocal_goto
)
813 /* We have to pass a value to the nonlocal_goto pattern that will
814 get copied into the static_chain pointer, but it does not matter
815 what that value is, because builtin_setjmp does not use it. */
816 emit_insn (gen_nonlocal_goto (value
, lab
, stack
, fp
));
820 lab
= copy_to_reg (lab
);
822 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
823 gen_rtx_MEM (BLKmode
,
824 gen_rtx_SCRATCH (VOIDmode
))));
825 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
826 gen_rtx_MEM (BLKmode
,
827 hard_frame_pointer_rtx
)));
829 emit_move_insn (hard_frame_pointer_rtx
, fp
);
830 emit_stack_restore (SAVE_NONLOCAL
, stack
, NULL_RTX
);
832 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
833 emit_insn (gen_rtx_USE (VOIDmode
, stack_pointer_rtx
));
834 emit_indirect_jump (lab
);
838 /* Search backwards and mark the jump insn as a non-local goto.
839 Note that this precludes the use of __builtin_longjmp to a
840 __builtin_setjmp target in the same function. However, we've
841 already cautioned the user that these functions are for
842 internal exception handling use only. */
843 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
845 gcc_assert (insn
!= last
);
849 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO
, const0_rtx
,
853 else if (CALL_P (insn
))
858 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
859 and the address of the save area. */
862 expand_builtin_nonlocal_goto (tree exp
)
864 tree t_label
, t_save_area
;
865 rtx r_label
, r_save_area
, r_fp
, r_sp
, insn
;
867 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
870 t_label
= CALL_EXPR_ARG (exp
, 0);
871 t_save_area
= CALL_EXPR_ARG (exp
, 1);
873 r_label
= expand_normal (t_label
);
874 r_label
= convert_memory_address (Pmode
, r_label
);
875 r_save_area
= expand_normal (t_save_area
);
876 r_save_area
= convert_memory_address (Pmode
, r_save_area
);
877 r_fp
= gen_rtx_MEM (Pmode
, r_save_area
);
878 r_sp
= gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL
),
879 plus_constant (r_save_area
, GET_MODE_SIZE (Pmode
)));
881 current_function_has_nonlocal_goto
= 1;
883 #ifdef HAVE_nonlocal_goto
884 /* ??? We no longer need to pass the static chain value, afaik. */
885 if (HAVE_nonlocal_goto
)
886 emit_insn (gen_nonlocal_goto (const0_rtx
, r_label
, r_sp
, r_fp
));
890 r_label
= copy_to_reg (r_label
);
892 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
893 gen_rtx_MEM (BLKmode
,
894 gen_rtx_SCRATCH (VOIDmode
))));
896 emit_insn (gen_rtx_CLOBBER (VOIDmode
,
897 gen_rtx_MEM (BLKmode
,
898 hard_frame_pointer_rtx
)));
900 /* Restore frame pointer for containing function.
901 This sets the actual hard register used for the frame pointer
902 to the location of the function's incoming static chain info.
903 The non-local goto handler will then adjust it to contain the
904 proper value and reload the argument pointer, if needed. */
905 emit_move_insn (hard_frame_pointer_rtx
, r_fp
);
906 emit_stack_restore (SAVE_NONLOCAL
, r_sp
, NULL_RTX
);
908 /* USE of hard_frame_pointer_rtx added for consistency;
909 not clear if really needed. */
910 emit_insn (gen_rtx_USE (VOIDmode
, hard_frame_pointer_rtx
));
911 emit_insn (gen_rtx_USE (VOIDmode
, stack_pointer_rtx
));
913 /* If the architecture is using a GP register, we must
914 conservatively assume that the target function makes use of it.
915 The prologue of functions with nonlocal gotos must therefore
916 initialize the GP register to the appropriate value, and we
917 must then make sure that this value is live at the point
918 of the jump. (Note that this doesn't necessarily apply
919 to targets with a nonlocal_goto pattern; they are free
920 to implement it in their own way. Note also that this is
921 a no-op if the GP register is a global invariant.) */
922 if ((unsigned) PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
923 && fixed_regs
[PIC_OFFSET_TABLE_REGNUM
])
924 emit_insn (gen_rtx_USE (VOIDmode
, pic_offset_table_rtx
));
926 emit_indirect_jump (r_label
);
929 /* Search backwards to the jump insn and mark it as a
931 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
935 REG_NOTES (insn
) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO
,
936 const0_rtx
, REG_NOTES (insn
));
939 else if (CALL_P (insn
))
946 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
947 (not all will be used on all machines) that was passed to __builtin_setjmp.
948 It updates the stack pointer in that block to correspond to the current
952 expand_builtin_update_setjmp_buf (rtx buf_addr
)
954 enum machine_mode sa_mode
= Pmode
;
958 #ifdef HAVE_save_stack_nonlocal
959 if (HAVE_save_stack_nonlocal
)
960 sa_mode
= insn_data
[(int) CODE_FOR_save_stack_nonlocal
].operand
[0].mode
;
962 #ifdef STACK_SAVEAREA_MODE
963 sa_mode
= STACK_SAVEAREA_MODE (SAVE_NONLOCAL
);
967 = gen_rtx_MEM (sa_mode
,
970 plus_constant (buf_addr
, 2 * GET_MODE_SIZE (Pmode
))));
974 emit_insn (gen_setjmp ());
977 emit_stack_save (SAVE_NONLOCAL
, &stack_save
, NULL_RTX
);
980 /* Expand a call to __builtin_prefetch. For a target that does not support
981 data prefetch, evaluate the memory address argument in case it has side
985 expand_builtin_prefetch (tree exp
)
987 tree arg0
, arg1
, arg2
;
991 if (!validate_arglist (exp
, POINTER_TYPE
, 0))
994 arg0
= CALL_EXPR_ARG (exp
, 0);
996 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
997 zero (read) and argument 2 (locality) defaults to 3 (high degree of
999 nargs
= call_expr_nargs (exp
);
1001 arg1
= CALL_EXPR_ARG (exp
, 1);
1003 arg1
= integer_zero_node
;
1005 arg2
= CALL_EXPR_ARG (exp
, 2);
1007 arg2
= build_int_cst (NULL_TREE
, 3);
1009 /* Argument 0 is an address. */
1010 op0
= expand_expr (arg0
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
1012 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1013 if (TREE_CODE (arg1
) != INTEGER_CST
)
1015 error ("second argument to %<__builtin_prefetch%> must be a constant");
1016 arg1
= integer_zero_node
;
1018 op1
= expand_normal (arg1
);
1019 /* Argument 1 must be either zero or one. */
1020 if (INTVAL (op1
) != 0 && INTVAL (op1
) != 1)
1022 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1027 /* Argument 2 (locality) must be a compile-time constant int. */
1028 if (TREE_CODE (arg2
) != INTEGER_CST
)
1030 error ("third argument to %<__builtin_prefetch%> must be a constant");
1031 arg2
= integer_zero_node
;
1033 op2
= expand_normal (arg2
);
1034 /* Argument 2 must be 0, 1, 2, or 3. */
1035 if (INTVAL (op2
) < 0 || INTVAL (op2
) > 3)
1037 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1041 #ifdef HAVE_prefetch
1044 if ((! (*insn_data
[(int) CODE_FOR_prefetch
].operand
[0].predicate
)
1046 insn_data
[(int) CODE_FOR_prefetch
].operand
[0].mode
))
1047 || (GET_MODE (op0
) != Pmode
))
1049 op0
= convert_memory_address (Pmode
, op0
);
1050 op0
= force_reg (Pmode
, op0
);
1052 emit_insn (gen_prefetch (op0
, op1
, op2
));
1056 /* Don't do anything with direct references to volatile memory, but
1057 generate code to handle other side effects. */
1058 if (!MEM_P (op0
) && side_effects_p (op0
))
1062 /* Get a MEM rtx for expression EXP which is the address of an operand
1063 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1064 the maximum length of the block of memory that might be accessed or
1068 get_memory_rtx (tree exp
, tree len
)
1070 rtx addr
= expand_expr (exp
, NULL_RTX
, ptr_mode
, EXPAND_NORMAL
);
1071 rtx mem
= gen_rtx_MEM (BLKmode
, memory_address (BLKmode
, addr
));
1073 /* Get an expression we can use to find the attributes to assign to MEM.
1074 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
1075 we can. First remove any nops. */
1076 while ((TREE_CODE (exp
) == NOP_EXPR
|| TREE_CODE (exp
) == CONVERT_EXPR
1077 || TREE_CODE (exp
) == NON_LVALUE_EXPR
)
1078 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
1079 exp
= TREE_OPERAND (exp
, 0);
1081 if (TREE_CODE (exp
) == ADDR_EXPR
)
1082 exp
= TREE_OPERAND (exp
, 0);
1083 else if (POINTER_TYPE_P (TREE_TYPE (exp
)))
1084 exp
= build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (exp
)), exp
);
1088 /* Honor attributes derived from exp, except for the alias set
1089 (as builtin stringops may alias with anything) and the size
1090 (as stringops may access multiple array elements). */
1093 set_mem_attributes (mem
, exp
, 0);
1095 /* Allow the string and memory builtins to overflow from one
1096 field into another, see http://gcc.gnu.org/PR23561.
1097 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1098 memory accessed by the string or memory builtin will fit
1099 within the field. */
1100 if (MEM_EXPR (mem
) && TREE_CODE (MEM_EXPR (mem
)) == COMPONENT_REF
)
1102 tree mem_expr
= MEM_EXPR (mem
);
1103 HOST_WIDE_INT offset
= -1, length
= -1;
1106 while (TREE_CODE (inner
) == ARRAY_REF
1107 || TREE_CODE (inner
) == NOP_EXPR
1108 || TREE_CODE (inner
) == CONVERT_EXPR
1109 || TREE_CODE (inner
) == NON_LVALUE_EXPR
1110 || TREE_CODE (inner
) == VIEW_CONVERT_EXPR
1111 || TREE_CODE (inner
) == SAVE_EXPR
)
1112 inner
= TREE_OPERAND (inner
, 0);
1114 gcc_assert (TREE_CODE (inner
) == COMPONENT_REF
);
1116 if (MEM_OFFSET (mem
)
1117 && GET_CODE (MEM_OFFSET (mem
)) == CONST_INT
)
1118 offset
= INTVAL (MEM_OFFSET (mem
));
1120 if (offset
>= 0 && len
&& host_integerp (len
, 0))
1121 length
= tree_low_cst (len
, 0);
1123 while (TREE_CODE (inner
) == COMPONENT_REF
)
1125 tree field
= TREE_OPERAND (inner
, 1);
1126 gcc_assert (! DECL_BIT_FIELD (field
));
1127 gcc_assert (TREE_CODE (mem_expr
) == COMPONENT_REF
);
1128 gcc_assert (field
== TREE_OPERAND (mem_expr
, 1));
1131 && TYPE_SIZE_UNIT (TREE_TYPE (inner
))
1132 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner
)), 0))
1135 = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner
)), 0);
1136 /* If we can prove the memory starting at XEXP (mem, 0)
1137 and ending at XEXP (mem, 0) + LENGTH will fit into
1138 this field, we can keep that COMPONENT_REF in MEM_EXPR. */
1141 && offset
+ length
<= size
)
1146 && host_integerp (DECL_FIELD_OFFSET (field
), 0))
1147 offset
+= tree_low_cst (DECL_FIELD_OFFSET (field
), 0)
1148 + tree_low_cst (DECL_FIELD_BIT_OFFSET (field
), 1)
1156 mem_expr
= TREE_OPERAND (mem_expr
, 0);
1157 inner
= TREE_OPERAND (inner
, 0);
1160 if (mem_expr
== NULL
)
1162 if (mem_expr
!= MEM_EXPR (mem
))
1164 set_mem_expr (mem
, mem_expr
);
1165 set_mem_offset (mem
, offset
>= 0 ? GEN_INT (offset
) : NULL_RTX
);
1168 set_mem_alias_set (mem
, 0);
1169 set_mem_size (mem
, NULL_RTX
);
1175 /* Built-in functions to perform an untyped call and return. */
1177 /* For each register that may be used for calling a function, this
1178 gives a mode used to copy the register's value. VOIDmode indicates
1179 the register is not used for calling a function. If the machine
1180 has register windows, this gives only the outbound registers.
1181 INCOMING_REGNO gives the corresponding inbound register. */
1182 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
1184 /* For each register that may be used for returning values, this gives
1185 a mode used to copy the register's value. VOIDmode indicates the
1186 register is not used for returning values. If the machine has
1187 register windows, this gives only the outbound registers.
1188 INCOMING_REGNO gives the corresponding inbound register. */
1189 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
1191 /* For each register that may be used for calling a function, this
1192 gives the offset of that register into the block returned by
1193 __builtin_apply_args. 0 indicates that the register is not
1194 used for calling a function. */
1195 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
1197 /* Return the size required for the block returned by __builtin_apply_args,
1198 and initialize apply_args_mode. */
1201 apply_args_size (void)
1203 static int size
= -1;
1206 enum machine_mode mode
;
1208 /* The values computed by this function never change. */
1211 /* The first value is the incoming arg-pointer. */
1212 size
= GET_MODE_SIZE (Pmode
);
1214 /* The second value is the structure value address unless this is
1215 passed as an "invisible" first argument. */
1216 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1217 size
+= GET_MODE_SIZE (Pmode
);
1219 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1220 if (FUNCTION_ARG_REGNO_P (regno
))
1222 mode
= reg_raw_mode
[regno
];
1224 gcc_assert (mode
!= VOIDmode
);
1226 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1227 if (size
% align
!= 0)
1228 size
= CEIL (size
, align
) * align
;
1229 apply_args_reg_offset
[regno
] = size
;
1230 size
+= GET_MODE_SIZE (mode
);
1231 apply_args_mode
[regno
] = mode
;
1235 apply_args_mode
[regno
] = VOIDmode
;
1236 apply_args_reg_offset
[regno
] = 0;
1242 /* Return the size required for the block returned by __builtin_apply,
1243 and initialize apply_result_mode. */
1246 apply_result_size (void)
1248 static int size
= -1;
1250 enum machine_mode mode
;
1252 /* The values computed by this function never change. */
1257 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1258 if (FUNCTION_VALUE_REGNO_P (regno
))
1260 mode
= reg_raw_mode
[regno
];
1262 gcc_assert (mode
!= VOIDmode
);
1264 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1265 if (size
% align
!= 0)
1266 size
= CEIL (size
, align
) * align
;
1267 size
+= GET_MODE_SIZE (mode
);
1268 apply_result_mode
[regno
] = mode
;
1271 apply_result_mode
[regno
] = VOIDmode
;
1273 /* Allow targets that use untyped_call and untyped_return to override
1274 the size so that machine-specific information can be stored here. */
1275 #ifdef APPLY_RESULT_SIZE
1276 size
= APPLY_RESULT_SIZE
;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned for this register's mode.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
1313 /* Save the state required to perform an untyped call with the same
1314 arguments as were passed to the current function. */
1317 expand_builtin_apply_args_1 (void)
1320 int size
, align
, regno
;
1321 enum machine_mode mode
;
1322 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1324 /* Create a block where the arg-pointer, structure value address,
1325 and argument registers can be saved. */
1326 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1328 /* Walk past the arg-pointer and structure value address. */
1329 size
= GET_MODE_SIZE (Pmode
);
1330 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1331 size
+= GET_MODE_SIZE (Pmode
);
1333 /* Save each register used in calling a function to the block. */
1334 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1335 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1337 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1338 if (size
% align
!= 0)
1339 size
= CEIL (size
, align
) * align
;
1341 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1343 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1344 size
+= GET_MODE_SIZE (mode
);
1347 /* Save the arg pointer to the block. */
1348 tem
= copy_to_reg (virtual_incoming_args_rtx
);
1349 #ifdef STACK_GROWS_DOWNWARD
1350 /* We need the pointer as the caller actually passed them to us, not
1351 as we might have pretended they were passed. Make sure it's a valid
1352 operand, as emit_move_insn isn't expected to handle a PLUS. */
1354 = force_operand (plus_constant (tem
, current_function_pretend_args_size
),
1357 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1359 size
= GET_MODE_SIZE (Pmode
);
1361 /* Save the structure value address unless this is passed as an
1362 "invisible" first argument. */
1363 if (struct_incoming_value
)
1365 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1366 copy_to_reg (struct_incoming_value
));
1367 size
+= GET_MODE_SIZE (Pmode
);
1370 /* Return the address of the block. */
1371 return copy_addr_to_reg (XEXP (registers
, 0));
1374 /* __builtin_apply_args returns block of memory allocated on
1375 the stack into which is stored the arg pointer, structure
1376 value address, static chain, and all the registers that might
1377 possibly be used in performing a function call. The code is
1378 moved to the start of the function so the incoming values are
1382 expand_builtin_apply_args (void)
1384 /* Don't do __builtin_apply_args more than once in a function.
1385 Save the result of the first call and reuse it. */
1386 if (apply_args_value
!= 0)
1387 return apply_args_value
;
1389 /* When this function is called, it means that registers must be
1390 saved on entry to this function. So we migrate the
1391 call to the first insn of this function. */
1396 temp
= expand_builtin_apply_args_1 ();
1400 apply_args_value
= temp
;
1402 /* Put the insns after the NOTE that starts the function.
1403 If this is inside a start_sequence, make the outer-level insn
1404 chain current, so the code is placed at the start of the
1406 push_topmost_sequence ();
1407 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1408 pop_topmost_sequence ();
1413 /* Perform an untyped call and save the state required to perform an
1414 untyped return of whatever value was returned by the given function. */
1417 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1419 int size
, align
, regno
;
1420 enum machine_mode mode
;
1421 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1422 rtx old_stack_level
= 0;
1423 rtx call_fusage
= 0;
1424 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1426 arguments
= convert_memory_address (Pmode
, arguments
);
1428 /* Create a block where the return registers can be saved. */
1429 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1431 /* Fetch the arg pointer from the ARGUMENTS block. */
1432 incoming_args
= gen_reg_rtx (Pmode
);
1433 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1434 #ifndef STACK_GROWS_DOWNWARD
1435 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1436 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1439 /* Push a new argument block and copy the arguments. Do not allow
1440 the (potential) memcpy call below to interfere with our stack
1442 do_pending_stack_adjust ();
1445 /* Save the stack with nonlocal if available. */
1446 #ifdef HAVE_save_stack_nonlocal
1447 if (HAVE_save_stack_nonlocal
)
1448 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1451 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1453 /* Allocate a block of memory onto the stack and copy the memory
1454 arguments to the outgoing arguments address. */
1455 allocate_dynamic_stack_space (argsize
, 0, BITS_PER_UNIT
);
1456 dest
= virtual_outgoing_args_rtx
;
1457 #ifndef STACK_GROWS_DOWNWARD
1458 if (GET_CODE (argsize
) == CONST_INT
)
1459 dest
= plus_constant (dest
, -INTVAL (argsize
));
1461 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1463 dest
= gen_rtx_MEM (BLKmode
, dest
);
1464 set_mem_align (dest
, PARM_BOUNDARY
);
1465 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1466 set_mem_align (src
, PARM_BOUNDARY
);
1467 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1469 /* Refer to the argument block. */
1471 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1472 set_mem_align (arguments
, PARM_BOUNDARY
);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size
= GET_MODE_SIZE (Pmode
);
1477 size
+= GET_MODE_SIZE (Pmode
);
1479 /* Restore each of the registers previously saved. Make USE insns
1480 for each of these registers for use in making the call. */
1481 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1482 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1484 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1485 if (size
% align
!= 0)
1486 size
= CEIL (size
, align
) * align
;
1487 reg
= gen_rtx_REG (mode
, regno
);
1488 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1489 use_reg (&call_fusage
, reg
);
1490 size
+= GET_MODE_SIZE (mode
);
1493 /* Restore the structure value address unless this is passed as an
1494 "invisible" first argument. */
1495 size
= GET_MODE_SIZE (Pmode
);
1498 rtx value
= gen_reg_rtx (Pmode
);
1499 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1500 emit_move_insn (struct_value
, value
);
1501 if (REG_P (struct_value
))
1502 use_reg (&call_fusage
, struct_value
);
1503 size
+= GET_MODE_SIZE (Pmode
);
1506 /* All arguments and registers used for the call are set up by now! */
1507 function
= prepare_call_address (function
, NULL
, &call_fusage
, 0, 0);
1509 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1510 and we don't want to load it into a register as an optimization,
1511 because prepare_call_address already did it if it should be done. */
1512 if (GET_CODE (function
) != SYMBOL_REF
)
1513 function
= memory_address (FUNCTION_MODE
, function
);
1515 /* Generate the actual call instruction and save the return value. */
1516 #ifdef HAVE_untyped_call
1517 if (HAVE_untyped_call
)
1518 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1519 result
, result_vector (1, result
)));
1522 #ifdef HAVE_call_value
1523 if (HAVE_call_value
)
1527 /* Locate the unique return register. It is not possible to
1528 express a call that sets more than one return register using
1529 call_value; use untyped_call for that. In fact, untyped_call
1530 only needs to save the return registers in the given block. */
1531 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1532 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1534 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1536 valreg
= gen_rtx_REG (mode
, regno
);
1539 emit_call_insn (GEN_CALL_VALUE (valreg
,
1540 gen_rtx_MEM (FUNCTION_MODE
, function
),
1541 const0_rtx
, NULL_RTX
, const0_rtx
));
1543 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1549 /* Find the CALL insn we just emitted, and attach the register usage
1551 call_insn
= last_call_insn ();
1552 add_function_usage_to (call_insn
, call_fusage
);
1554 /* Restore the stack. */
1555 #ifdef HAVE_save_stack_nonlocal
1556 if (HAVE_save_stack_nonlocal
)
1557 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1560 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1564 /* Return the address of the result block. */
1565 result
= copy_addr_to_reg (XEXP (result
, 0));
1566 return convert_memory_address (ptr_mode
, result
);
1569 /* Perform an untyped return. */
1572 expand_builtin_return (rtx result
)
1574 int size
, align
, regno
;
1575 enum machine_mode mode
;
1577 rtx call_fusage
= 0;
1579 result
= convert_memory_address (Pmode
, result
);
1581 apply_result_size ();
1582 result
= gen_rtx_MEM (BLKmode
, result
);
1584 #ifdef HAVE_untyped_return
1585 if (HAVE_untyped_return
)
1587 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1593 /* Restore the return value and note that each value is used. */
1595 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1596 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1598 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1599 if (size
% align
!= 0)
1600 size
= CEIL (size
, align
) * align
;
1601 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1602 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1604 push_to_sequence (call_fusage
);
1605 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
1606 call_fusage
= get_insns ();
1608 size
+= GET_MODE_SIZE (mode
);
1611 /* Put the USE insns before the return. */
1612 emit_insn (call_fusage
);
1614 /* Return whatever values was restored by jumping directly to the end
1616 expand_naked_return ();
1619 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1621 static enum type_class
1622 type_to_class (tree type
)
1624 switch (TREE_CODE (type
))
1626 case VOID_TYPE
: return void_type_class
;
1627 case INTEGER_TYPE
: return integer_type_class
;
1628 case ENUMERAL_TYPE
: return enumeral_type_class
;
1629 case BOOLEAN_TYPE
: return boolean_type_class
;
1630 case POINTER_TYPE
: return pointer_type_class
;
1631 case REFERENCE_TYPE
: return reference_type_class
;
1632 case OFFSET_TYPE
: return offset_type_class
;
1633 case REAL_TYPE
: return real_type_class
;
1634 case COMPLEX_TYPE
: return complex_type_class
;
1635 case FUNCTION_TYPE
: return function_type_class
;
1636 case METHOD_TYPE
: return method_type_class
;
1637 case RECORD_TYPE
: return record_type_class
;
1639 case QUAL_UNION_TYPE
: return union_type_class
;
1640 case ARRAY_TYPE
: return (TYPE_STRING_FLAG (type
)
1641 ? string_type_class
: array_type_class
);
1642 case LANG_TYPE
: return lang_type_class
;
1643 default: return no_type_class
;
1647 /* Expand a call EXP to __builtin_classify_type. */
1650 expand_builtin_classify_type (tree exp
)
1652 if (call_expr_nargs (exp
))
1653 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))));
1654 return GEN_INT (no_type_class
);
1657 /* This helper macro, meant to be used in mathfn_built_in below,
1658 determines which among a set of three builtin math functions is
1659 appropriate for a given type mode. The `F' and `L' cases are
1660 automatically generated from the `double' case. */
1661 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1662 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1663 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1664 fcodel = BUILT_IN_MATHFN##L ; break;
1665 /* Similar to above, but appends _R after any F/L suffix. */
1666 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1667 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1668 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1669 fcodel = BUILT_IN_MATHFN##L_R ; break;
1671 /* Return mathematic function equivalent to FN but operating directly
1672 on TYPE, if available. If we can't do the conversion, return zero. */
1674 mathfn_built_in (tree type
, enum built_in_function fn
)
1676 enum built_in_function fcode
, fcodef
, fcodel
;
1680 CASE_MATHFN (BUILT_IN_ACOS
)
1681 CASE_MATHFN (BUILT_IN_ACOSH
)
1682 CASE_MATHFN (BUILT_IN_ASIN
)
1683 CASE_MATHFN (BUILT_IN_ASINH
)
1684 CASE_MATHFN (BUILT_IN_ATAN
)
1685 CASE_MATHFN (BUILT_IN_ATAN2
)
1686 CASE_MATHFN (BUILT_IN_ATANH
)
1687 CASE_MATHFN (BUILT_IN_CBRT
)
1688 CASE_MATHFN (BUILT_IN_CEIL
)
1689 CASE_MATHFN (BUILT_IN_CEXPI
)
1690 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1691 CASE_MATHFN (BUILT_IN_COS
)
1692 CASE_MATHFN (BUILT_IN_COSH
)
1693 CASE_MATHFN (BUILT_IN_DREM
)
1694 CASE_MATHFN (BUILT_IN_ERF
)
1695 CASE_MATHFN (BUILT_IN_ERFC
)
1696 CASE_MATHFN (BUILT_IN_EXP
)
1697 CASE_MATHFN (BUILT_IN_EXP10
)
1698 CASE_MATHFN (BUILT_IN_EXP2
)
1699 CASE_MATHFN (BUILT_IN_EXPM1
)
1700 CASE_MATHFN (BUILT_IN_FABS
)
1701 CASE_MATHFN (BUILT_IN_FDIM
)
1702 CASE_MATHFN (BUILT_IN_FLOOR
)
1703 CASE_MATHFN (BUILT_IN_FMA
)
1704 CASE_MATHFN (BUILT_IN_FMAX
)
1705 CASE_MATHFN (BUILT_IN_FMIN
)
1706 CASE_MATHFN (BUILT_IN_FMOD
)
1707 CASE_MATHFN (BUILT_IN_FREXP
)
1708 CASE_MATHFN (BUILT_IN_GAMMA
)
1709 CASE_MATHFN_REENT (BUILT_IN_GAMMA
) /* GAMMA_R */
1710 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1711 CASE_MATHFN (BUILT_IN_HYPOT
)
1712 CASE_MATHFN (BUILT_IN_ILOGB
)
1713 CASE_MATHFN (BUILT_IN_INF
)
1714 CASE_MATHFN (BUILT_IN_ISINF
)
1715 CASE_MATHFN (BUILT_IN_J0
)
1716 CASE_MATHFN (BUILT_IN_J1
)
1717 CASE_MATHFN (BUILT_IN_JN
)
1718 CASE_MATHFN (BUILT_IN_LCEIL
)
1719 CASE_MATHFN (BUILT_IN_LDEXP
)
1720 CASE_MATHFN (BUILT_IN_LFLOOR
)
1721 CASE_MATHFN (BUILT_IN_LGAMMA
)
1722 CASE_MATHFN_REENT (BUILT_IN_LGAMMA
) /* LGAMMA_R */
1723 CASE_MATHFN (BUILT_IN_LLCEIL
)
1724 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1725 CASE_MATHFN (BUILT_IN_LLRINT
)
1726 CASE_MATHFN (BUILT_IN_LLROUND
)
1727 CASE_MATHFN (BUILT_IN_LOG
)
1728 CASE_MATHFN (BUILT_IN_LOG10
)
1729 CASE_MATHFN (BUILT_IN_LOG1P
)
1730 CASE_MATHFN (BUILT_IN_LOG2
)
1731 CASE_MATHFN (BUILT_IN_LOGB
)
1732 CASE_MATHFN (BUILT_IN_LRINT
)
1733 CASE_MATHFN (BUILT_IN_LROUND
)
1734 CASE_MATHFN (BUILT_IN_MODF
)
1735 CASE_MATHFN (BUILT_IN_NAN
)
1736 CASE_MATHFN (BUILT_IN_NANS
)
1737 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1738 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1739 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1740 CASE_MATHFN (BUILT_IN_POW
)
1741 CASE_MATHFN (BUILT_IN_POWI
)
1742 CASE_MATHFN (BUILT_IN_POW10
)
1743 CASE_MATHFN (BUILT_IN_REMAINDER
)
1744 CASE_MATHFN (BUILT_IN_REMQUO
)
1745 CASE_MATHFN (BUILT_IN_RINT
)
1746 CASE_MATHFN (BUILT_IN_ROUND
)
1747 CASE_MATHFN (BUILT_IN_SCALB
)
1748 CASE_MATHFN (BUILT_IN_SCALBLN
)
1749 CASE_MATHFN (BUILT_IN_SCALBN
)
1750 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1751 CASE_MATHFN (BUILT_IN_SIN
)
1752 CASE_MATHFN (BUILT_IN_SINCOS
)
1753 CASE_MATHFN (BUILT_IN_SINH
)
1754 CASE_MATHFN (BUILT_IN_SQRT
)
1755 CASE_MATHFN (BUILT_IN_TAN
)
1756 CASE_MATHFN (BUILT_IN_TANH
)
1757 CASE_MATHFN (BUILT_IN_TGAMMA
)
1758 CASE_MATHFN (BUILT_IN_TRUNC
)
1759 CASE_MATHFN (BUILT_IN_Y0
)
1760 CASE_MATHFN (BUILT_IN_Y1
)
1761 CASE_MATHFN (BUILT_IN_YN
)
1767 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1768 return implicit_built_in_decls
[fcode
];
1769 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1770 return implicit_built_in_decls
[fcodef
];
1771 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1772 return implicit_built_in_decls
[fcodel
];
1777 /* If errno must be maintained, expand the RTL to check if the result,
1778 TARGET, of a built-in function call, EXP, is NaN, and if so set
1782 expand_errno_check (tree exp
, rtx target
)
1784 rtx lab
= gen_label_rtx ();
1786 /* Test the result; if it is NaN, set errno=EDOM because
1787 the argument was not in the domain. */
1788 emit_cmp_and_jump_insns (target
, target
, EQ
, 0, GET_MODE (target
),
1792 /* If this built-in doesn't throw an exception, set errno directly. */
1793 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp
), 0)))
1795 #ifdef GEN_ERRNO_RTX
1796 rtx errno_rtx
= GEN_ERRNO_RTX
;
1799 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1801 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1807 /* We can't set errno=EDOM directly; let the library call do it.
1808 Pop the arguments right away in case the call gets deleted. */
1810 expand_call (exp
, target
, 0);
1815 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1816 Return NULL_RTX if a normal call should be emitted rather than expanding
1817 the function in-line. EXP is the expression that is a call to the builtin
1818 function; if convenient, the result should be placed in TARGET.
1819 SUBTARGET may be used as the target for computing one of EXP's operands. */
1822 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1824 optab builtin_optab
;
1825 rtx op0
, insns
, before_call
;
1826 tree fndecl
= get_callee_fndecl (exp
);
1827 enum machine_mode mode
;
1828 bool errno_set
= false;
1831 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
1834 arg
= CALL_EXPR_ARG (exp
, 0);
1836 switch (DECL_FUNCTION_CODE (fndecl
))
1838 CASE_FLT_FN (BUILT_IN_SQRT
):
1839 errno_set
= ! tree_expr_nonnegative_p (arg
);
1840 builtin_optab
= sqrt_optab
;
1842 CASE_FLT_FN (BUILT_IN_EXP
):
1843 errno_set
= true; builtin_optab
= exp_optab
; break;
1844 CASE_FLT_FN (BUILT_IN_EXP10
):
1845 CASE_FLT_FN (BUILT_IN_POW10
):
1846 errno_set
= true; builtin_optab
= exp10_optab
; break;
1847 CASE_FLT_FN (BUILT_IN_EXP2
):
1848 errno_set
= true; builtin_optab
= exp2_optab
; break;
1849 CASE_FLT_FN (BUILT_IN_EXPM1
):
1850 errno_set
= true; builtin_optab
= expm1_optab
; break;
1851 CASE_FLT_FN (BUILT_IN_LOGB
):
1852 errno_set
= true; builtin_optab
= logb_optab
; break;
1853 CASE_FLT_FN (BUILT_IN_LOG
):
1854 errno_set
= true; builtin_optab
= log_optab
; break;
1855 CASE_FLT_FN (BUILT_IN_LOG10
):
1856 errno_set
= true; builtin_optab
= log10_optab
; break;
1857 CASE_FLT_FN (BUILT_IN_LOG2
):
1858 errno_set
= true; builtin_optab
= log2_optab
; break;
1859 CASE_FLT_FN (BUILT_IN_LOG1P
):
1860 errno_set
= true; builtin_optab
= log1p_optab
; break;
1861 CASE_FLT_FN (BUILT_IN_ASIN
):
1862 builtin_optab
= asin_optab
; break;
1863 CASE_FLT_FN (BUILT_IN_ACOS
):
1864 builtin_optab
= acos_optab
; break;
1865 CASE_FLT_FN (BUILT_IN_TAN
):
1866 builtin_optab
= tan_optab
; break;
1867 CASE_FLT_FN (BUILT_IN_ATAN
):
1868 builtin_optab
= atan_optab
; break;
1869 CASE_FLT_FN (BUILT_IN_FLOOR
):
1870 builtin_optab
= floor_optab
; break;
1871 CASE_FLT_FN (BUILT_IN_CEIL
):
1872 builtin_optab
= ceil_optab
; break;
1873 CASE_FLT_FN (BUILT_IN_TRUNC
):
1874 builtin_optab
= btrunc_optab
; break;
1875 CASE_FLT_FN (BUILT_IN_ROUND
):
1876 builtin_optab
= round_optab
; break;
1877 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
1878 builtin_optab
= nearbyint_optab
;
1879 if (flag_trapping_math
)
1881 /* Else fallthrough and expand as rint. */
1882 CASE_FLT_FN (BUILT_IN_RINT
):
1883 builtin_optab
= rint_optab
; break;
1888 /* Make a suitable register to place result in. */
1889 mode
= TYPE_MODE (TREE_TYPE (exp
));
1891 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
1894 /* Before working hard, check whether the instruction is available. */
1895 if (optab_handler (builtin_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
1897 target
= gen_reg_rtx (mode
);
1899 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1900 need to expand the argument again. This way, we will not perform
1901 side-effects more the once. */
1902 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
1904 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
1908 /* Compute into TARGET.
1909 Set TARGET to wherever the result comes back. */
1910 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
1915 expand_errno_check (exp
, target
);
1917 /* Output the entire sequence. */
1918 insns
= get_insns ();
1924 /* If we were unable to expand via the builtin, stop the sequence
1925 (without outputting the insns) and call to the library function
1926 with the stabilized argument list. */
1930 before_call
= get_last_insn ();
1932 target
= expand_call (exp
, target
, target
== const0_rtx
);
1934 /* If this is a sqrt operation and we don't care about errno, try to
1935 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1936 This allows the semantics of the libcall to be visible to the RTL
1938 if (builtin_optab
== sqrt_optab
&& !errno_set
)
1940 /* Search backwards through the insns emitted by expand_call looking
1941 for the instruction with the REG_RETVAL note. */
1942 rtx last
= get_last_insn ();
1943 while (last
!= before_call
)
1945 if (find_reg_note (last
, REG_RETVAL
, NULL
))
1947 rtx note
= find_reg_note (last
, REG_EQUAL
, NULL
);
1948 /* Check that the REQ_EQUAL note is an EXPR_LIST with
1949 two elements, i.e. symbol_ref(sqrt) and the operand. */
1951 && GET_CODE (note
) == EXPR_LIST
1952 && GET_CODE (XEXP (note
, 0)) == EXPR_LIST
1953 && XEXP (XEXP (note
, 0), 1) != NULL_RTX
1954 && XEXP (XEXP (XEXP (note
, 0), 1), 1) == NULL_RTX
)
1956 rtx operand
= XEXP (XEXP (XEXP (note
, 0), 1), 0);
1957 /* Check operand is a register with expected mode. */
1960 && GET_MODE (operand
) == mode
)
1962 /* Replace the REG_EQUAL note with a SQRT rtx. */
1963 rtx equiv
= gen_rtx_SQRT (mode
, operand
);
1964 set_unique_reg_note (last
, REG_EQUAL
, equiv
);
1969 last
= PREV_INSN (last
);
1976 /* Expand a call to the builtin binary math functions (pow and atan2).
1977 Return NULL_RTX if a normal call should be emitted rather than expanding the
1978 function in-line. EXP is the expression that is a call to the builtin
1979 function; if convenient, the result should be placed in TARGET.
1980 SUBTARGET may be used as the target for computing one of EXP's
1984 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
1986 optab builtin_optab
;
1987 rtx op0
, op1
, insns
;
1988 int op1_type
= REAL_TYPE
;
1989 tree fndecl
= get_callee_fndecl (exp
);
1991 enum machine_mode mode
;
1992 bool errno_set
= true;
1994 switch (DECL_FUNCTION_CODE (fndecl
))
1996 CASE_FLT_FN (BUILT_IN_SCALBN
):
1997 CASE_FLT_FN (BUILT_IN_SCALBLN
):
1998 CASE_FLT_FN (BUILT_IN_LDEXP
):
1999 op1_type
= INTEGER_TYPE
;
2004 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
2007 arg0
= CALL_EXPR_ARG (exp
, 0);
2008 arg1
= CALL_EXPR_ARG (exp
, 1);
2010 switch (DECL_FUNCTION_CODE (fndecl
))
2012 CASE_FLT_FN (BUILT_IN_POW
):
2013 builtin_optab
= pow_optab
; break;
2014 CASE_FLT_FN (BUILT_IN_ATAN2
):
2015 builtin_optab
= atan2_optab
; break;
2016 CASE_FLT_FN (BUILT_IN_SCALB
):
2017 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2019 builtin_optab
= scalb_optab
; break;
2020 CASE_FLT_FN (BUILT_IN_SCALBN
):
2021 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2022 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2024 /* Fall through... */
2025 CASE_FLT_FN (BUILT_IN_LDEXP
):
2026 builtin_optab
= ldexp_optab
; break;
2027 CASE_FLT_FN (BUILT_IN_FMOD
):
2028 builtin_optab
= fmod_optab
; break;
2029 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2030 CASE_FLT_FN (BUILT_IN_DREM
):
2031 builtin_optab
= remainder_optab
; break;
2036 /* Make a suitable register to place result in. */
2037 mode
= TYPE_MODE (TREE_TYPE (exp
));
2039 /* Before working hard, check whether the instruction is available. */
2040 if (optab_handler (builtin_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2043 target
= gen_reg_rtx (mode
);
2045 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2048 /* Always stabilize the argument list. */
2049 CALL_EXPR_ARG (exp
, 0) = arg0
= builtin_save_expr (arg0
);
2050 CALL_EXPR_ARG (exp
, 1) = arg1
= builtin_save_expr (arg1
);
2052 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2053 op1
= expand_normal (arg1
);
2057 /* Compute into TARGET.
2058 Set TARGET to wherever the result comes back. */
2059 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2060 target
, 0, OPTAB_DIRECT
);
2062 /* If we were unable to expand via the builtin, stop the sequence
2063 (without outputting the insns) and call to the library function
2064 with the stabilized argument list. */
2068 return expand_call (exp
, target
, target
== const0_rtx
);
2072 expand_errno_check (exp
, target
);
2074 /* Output the entire sequence. */
2075 insns
= get_insns ();
2082 /* Expand a call to the builtin sin and cos math functions.
2083 Return NULL_RTX if a normal call should be emitted rather than expanding the
2084 function in-line. EXP is the expression that is a call to the builtin
2085 function; if convenient, the result should be placed in TARGET.
2086 SUBTARGET may be used as the target for computing one of EXP's
2090 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2092 optab builtin_optab
;
2094 tree fndecl
= get_callee_fndecl (exp
);
2095 enum machine_mode mode
;
2098 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2101 arg
= CALL_EXPR_ARG (exp
, 0);
2103 switch (DECL_FUNCTION_CODE (fndecl
))
2105 CASE_FLT_FN (BUILT_IN_SIN
):
2106 CASE_FLT_FN (BUILT_IN_COS
):
2107 builtin_optab
= sincos_optab
; break;
2112 /* Make a suitable register to place result in. */
2113 mode
= TYPE_MODE (TREE_TYPE (exp
));
2115 /* Check if sincos insn is available, otherwise fallback
2116 to sin or cos insn. */
2117 if (optab_handler (builtin_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2118 switch (DECL_FUNCTION_CODE (fndecl
))
2120 CASE_FLT_FN (BUILT_IN_SIN
):
2121 builtin_optab
= sin_optab
; break;
2122 CASE_FLT_FN (BUILT_IN_COS
):
2123 builtin_optab
= cos_optab
; break;
2128 /* Before working hard, check whether the instruction is available. */
2129 if (optab_handler (builtin_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
2131 target
= gen_reg_rtx (mode
);
2133 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2134 need to expand the argument again. This way, we will not perform
2135 side-effects more the once. */
2136 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2138 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2142 /* Compute into TARGET.
2143 Set TARGET to wherever the result comes back. */
2144 if (builtin_optab
== sincos_optab
)
2148 switch (DECL_FUNCTION_CODE (fndecl
))
2150 CASE_FLT_FN (BUILT_IN_SIN
):
2151 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2153 CASE_FLT_FN (BUILT_IN_COS
):
2154 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2159 gcc_assert (result
);
2163 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2168 /* Output the entire sequence. */
2169 insns
= get_insns ();
2175 /* If we were unable to expand via the builtin, stop the sequence
2176 (without outputting the insns) and call to the library function
2177 with the stabilized argument list. */
2181 target
= expand_call (exp
, target
, target
== const0_rtx
);
2186 /* Expand a call to one of the builtin math functions that operate on
2187 floating point argument and output an integer result (ilogb, isinf,
2189 Return 0 if a normal call should be emitted rather than expanding the
2190 function in-line. EXP is the expression that is a call to the builtin
2191 function; if convenient, the result should be placed in TARGET.
2192 SUBTARGET may be used as the target for computing one of EXP's operands. */
2195 expand_builtin_interclass_mathfn (tree exp
, rtx target
, rtx subtarget
)
2197 optab builtin_optab
= 0;
2198 enum insn_code icode
= CODE_FOR_nothing
;
2200 tree fndecl
= get_callee_fndecl (exp
);
2201 enum machine_mode mode
;
2202 bool errno_set
= false;
2205 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2208 arg
= CALL_EXPR_ARG (exp
, 0);
2210 switch (DECL_FUNCTION_CODE (fndecl
))
2212 CASE_FLT_FN (BUILT_IN_ILOGB
):
2213 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2214 CASE_FLT_FN (BUILT_IN_ISINF
):
2215 builtin_optab
= isinf_optab
; break;
2216 case BUILT_IN_ISNORMAL
:
2217 case BUILT_IN_ISFINITE
:
2218 CASE_FLT_FN (BUILT_IN_FINITE
):
2219 /* These builtins have no optabs (yet). */
2225 /* There's no easy way to detect the case we need to set EDOM. */
2226 if (flag_errno_math
&& errno_set
)
2229 /* Optab mode depends on the mode of the input argument. */
2230 mode
= TYPE_MODE (TREE_TYPE (arg
));
2233 icode
= optab_handler (builtin_optab
, mode
)->insn_code
;
2235 /* Before working hard, check whether the instruction is available. */
2236 if (icode
!= CODE_FOR_nothing
)
2238 /* Make a suitable register to place result in. */
2240 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2241 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2243 gcc_assert (insn_data
[icode
].operand
[0].predicate
2244 (target
, GET_MODE (target
)));
2246 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2247 need to expand the argument again. This way, we will not perform
2248 side-effects more the once. */
2249 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2251 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2253 if (mode
!= GET_MODE (op0
))
2254 op0
= convert_to_mode (mode
, op0
, 0);
2256 /* Compute into TARGET.
2257 Set TARGET to wherever the result comes back. */
2258 emit_unop_insn (icode
, target
, op0
, UNKNOWN
);
2262 /* If there is no optab, try generic code. */
2263 switch (DECL_FUNCTION_CODE (fndecl
))
2267 CASE_FLT_FN (BUILT_IN_ISINF
):
2269 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
2270 tree
const isgr_fn
= built_in_decls
[BUILT_IN_ISGREATER
];
2271 tree
const type
= TREE_TYPE (arg
);
2275 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2276 real_from_string (&r
, buf
);
2277 result
= build_call_expr (isgr_fn
, 2,
2278 fold_build1 (ABS_EXPR
, type
, arg
),
2279 build_real (type
, r
));
2280 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2282 CASE_FLT_FN (BUILT_IN_FINITE
):
2283 case BUILT_IN_ISFINITE
:
2285 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
2286 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
2287 tree
const type
= TREE_TYPE (arg
);
2291 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2292 real_from_string (&r
, buf
);
2293 result
= build_call_expr (isle_fn
, 2,
2294 fold_build1 (ABS_EXPR
, type
, arg
),
2295 build_real (type
, r
));
2296 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2298 case BUILT_IN_ISNORMAL
:
2300 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
2301 islessequal(fabs(x),DBL_MAX). */
2302 tree
const isle_fn
= built_in_decls
[BUILT_IN_ISLESSEQUAL
];
2303 tree
const isge_fn
= built_in_decls
[BUILT_IN_ISGREATEREQUAL
];
2304 tree
const type
= TREE_TYPE (arg
);
2305 REAL_VALUE_TYPE rmax
, rmin
;
2308 get_max_float (REAL_MODE_FORMAT (mode
), buf
, sizeof (buf
));
2309 real_from_string (&rmax
, buf
);
2310 sprintf (buf
, "0x1p%d", REAL_MODE_FORMAT (mode
)->emin
- 1);
2311 real_from_string (&rmin
, buf
);
2312 arg
= builtin_save_expr (fold_build1 (ABS_EXPR
, type
, arg
));
2313 result
= build_call_expr (isle_fn
, 2, arg
,
2314 build_real (type
, rmax
));
2315 result
= fold_build2 (BIT_AND_EXPR
, integer_type_node
, result
,
2316 build_call_expr (isge_fn
, 2, arg
,
2317 build_real (type
, rmin
)));
2318 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
2324 target
= expand_call (exp
, target
, target
== const0_rtx
);
2329 /* Expand a call to the builtin sincos math function.
2330 Return NULL_RTX if a normal call should be emitted rather than expanding the
2331 function in-line. EXP is the expression that is a call to the builtin
2335 expand_builtin_sincos (tree exp
)
2337 rtx op0
, op1
, op2
, target1
, target2
;
2338 enum machine_mode mode
;
2339 tree arg
, sinp
, cosp
;
2342 if (!validate_arglist (exp
, REAL_TYPE
,
2343 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2346 arg
= CALL_EXPR_ARG (exp
, 0);
2347 sinp
= CALL_EXPR_ARG (exp
, 1);
2348 cosp
= CALL_EXPR_ARG (exp
, 2);
2350 /* Make a suitable register to place result in. */
2351 mode
= TYPE_MODE (TREE_TYPE (arg
));
2353 /* Check if sincos insn is available, otherwise emit the call. */
2354 if (optab_handler (sincos_optab
, mode
)->insn_code
== CODE_FOR_nothing
)
2357 target1
= gen_reg_rtx (mode
);
2358 target2
= gen_reg_rtx (mode
);
2360 op0
= expand_normal (arg
);
2361 op1
= expand_normal (build_fold_indirect_ref (sinp
));
2362 op2
= expand_normal (build_fold_indirect_ref (cosp
));
2364 /* Compute into target1 and target2.
2365 Set TARGET to wherever the result comes back. */
2366 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2367 gcc_assert (result
);
2369 /* Move target1 and target2 to the memory locations indicated
2371 emit_move_insn (op1
, target1
);
2372 emit_move_insn (op2
, target2
);
2377 /* Expand a call to the internal cexpi builtin to the sincos math function.
2378 EXP is the expression that is a call to the builtin function; if convenient,
2379 the result should be placed in TARGET. SUBTARGET may be used as the target
2380 for computing one of EXP's operands. */
2383 expand_builtin_cexpi (tree exp
, rtx target
, rtx subtarget
)
2385 tree fndecl
= get_callee_fndecl (exp
);
2387 enum machine_mode mode
;
2390 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2393 arg
= CALL_EXPR_ARG (exp
, 0);
2394 type
= TREE_TYPE (arg
);
2395 mode
= TYPE_MODE (TREE_TYPE (arg
));
2397 /* Try expanding via a sincos optab, fall back to emitting a libcall
2398 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2399 is only generated from sincos, cexp or if we have either of them. */
2400 if (optab_handler (sincos_optab
, mode
)->insn_code
!= CODE_FOR_nothing
)
2402 op1
= gen_reg_rtx (mode
);
2403 op2
= gen_reg_rtx (mode
);
2405 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2407 /* Compute into op1 and op2. */
2408 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2410 else if (TARGET_HAS_SINCOS
)
2412 tree call
, fn
= NULL_TREE
;
2416 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2417 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2418 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2419 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2420 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2421 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
2425 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2426 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2427 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2428 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2429 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2430 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2432 /* Make sure not to fold the sincos call again. */
2433 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2434 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2435 call
, 3, arg
, top1
, top2
));
2439 tree call
, fn
= NULL_TREE
, narg
;
2440 tree ctype
= build_complex_type (type
);
2442 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2443 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2444 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2445 fn
= built_in_decls
[BUILT_IN_CEXP
];
2446 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2447 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2451 /* If we don't have a decl for cexp create one. This is the
2452 friendliest fallback if the user calls __builtin_cexpi
2453 without full target C99 function support. */
2454 if (fn
== NULL_TREE
)
2457 const char *name
= NULL
;
2459 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2461 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2463 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2466 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2467 fn
= build_fn_decl (name
, fntype
);
2470 narg
= fold_build2 (COMPLEX_EXPR
, ctype
,
2471 build_real (type
, dconst0
), arg
);
2473 /* Make sure not to fold the cexp call again. */
2474 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2475 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2476 target
, VOIDmode
, EXPAND_NORMAL
);
2479 /* Now build the proper return type. */
2480 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2481 make_tree (TREE_TYPE (arg
), op2
),
2482 make_tree (TREE_TYPE (arg
), op1
)),
2483 target
, VOIDmode
, EXPAND_NORMAL
);
2486 /* Expand a call to one of the builtin rounding functions gcc defines
2487 as an extension (lfloor and lceil). As these are gcc extensions we
2488 do not need to worry about setting errno to EDOM.
2489 If expanding via optab fails, lower expression to (int)(floor(x)).
2490 EXP is the expression that is a call to the builtin function;
2491 if convenient, the result should be placed in TARGET. SUBTARGET may
2492 be used as the target for computing one of EXP's operands. */
2495 expand_builtin_int_roundingfn (tree exp
, rtx target
, rtx subtarget
)
2497 convert_optab builtin_optab
;
2498 rtx op0
, insns
, tmp
;
2499 tree fndecl
= get_callee_fndecl (exp
);
2500 enum built_in_function fallback_fn
;
2501 tree fallback_fndecl
;
2502 enum machine_mode mode
;
2505 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2508 arg
= CALL_EXPR_ARG (exp
, 0);
2510 switch (DECL_FUNCTION_CODE (fndecl
))
2512 CASE_FLT_FN (BUILT_IN_LCEIL
):
2513 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2514 builtin_optab
= lceil_optab
;
2515 fallback_fn
= BUILT_IN_CEIL
;
2518 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2519 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2520 builtin_optab
= lfloor_optab
;
2521 fallback_fn
= BUILT_IN_FLOOR
;
2528 /* Make a suitable register to place result in. */
2529 mode
= TYPE_MODE (TREE_TYPE (exp
));
2531 target
= gen_reg_rtx (mode
);
2533 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2534 need to expand the argument again. This way, we will not perform
2535 side-effects more the once. */
2536 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2538 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2542 /* Compute into TARGET. */
2543 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2545 /* Output the entire sequence. */
2546 insns
= get_insns ();
2552 /* If we were unable to expand via the builtin, stop the sequence
2553 (without outputting the insns). */
2556 /* Fall back to floating point rounding optab. */
2557 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2559 /* For non-C99 targets we may end up without a fallback fndecl here
2560 if the user called __builtin_lfloor directly. In this case emit
2561 a call to the floor/ceil variants nevertheless. This should result
2562 in the best user experience for not full C99 targets. */
2563 if (fallback_fndecl
== NULL_TREE
)
2566 const char *name
= NULL
;
2568 switch (DECL_FUNCTION_CODE (fndecl
))
2570 case BUILT_IN_LCEIL
:
2571 case BUILT_IN_LLCEIL
:
2574 case BUILT_IN_LCEILF
:
2575 case BUILT_IN_LLCEILF
:
2578 case BUILT_IN_LCEILL
:
2579 case BUILT_IN_LLCEILL
:
2582 case BUILT_IN_LFLOOR
:
2583 case BUILT_IN_LLFLOOR
:
2586 case BUILT_IN_LFLOORF
:
2587 case BUILT_IN_LLFLOORF
:
2590 case BUILT_IN_LFLOORL
:
2591 case BUILT_IN_LLFLOORL
:
2598 fntype
= build_function_type_list (TREE_TYPE (arg
),
2599 TREE_TYPE (arg
), NULL_TREE
);
2600 fallback_fndecl
= build_fn_decl (name
, fntype
);
2603 exp
= build_call_expr (fallback_fndecl
, 1, arg
);
2605 tmp
= expand_normal (exp
);
2607 /* Truncate the result of floating point optab to integer
2608 via expand_fix (). */
2609 target
= gen_reg_rtx (mode
);
2610 expand_fix (target
, tmp
, 0);
2615 /* Expand a call to one of the builtin math functions doing integer
2617 Return 0 if a normal call should be emitted rather than expanding the
2618 function in-line. EXP is the expression that is a call to the builtin
2619 function; if convenient, the result should be placed in TARGET.
2620 SUBTARGET may be used as the target for computing one of EXP's operands. */
2623 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
, rtx subtarget
)
2625 convert_optab builtin_optab
;
2627 tree fndecl
= get_callee_fndecl (exp
);
2629 enum machine_mode mode
;
2631 /* There's no easy way to detect the case we need to set EDOM. */
2632 if (flag_errno_math
)
2635 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2638 arg
= CALL_EXPR_ARG (exp
, 0);
2640 switch (DECL_FUNCTION_CODE (fndecl
))
2642 CASE_FLT_FN (BUILT_IN_LRINT
):
2643 CASE_FLT_FN (BUILT_IN_LLRINT
):
2644 builtin_optab
= lrint_optab
; break;
2645 CASE_FLT_FN (BUILT_IN_LROUND
):
2646 CASE_FLT_FN (BUILT_IN_LLROUND
):
2647 builtin_optab
= lround_optab
; break;
2652 /* Make a suitable register to place result in. */
2653 mode
= TYPE_MODE (TREE_TYPE (exp
));
2655 target
= gen_reg_rtx (mode
);
2657 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2658 need to expand the argument again. This way, we will not perform
2659 side-effects more the once. */
2660 CALL_EXPR_ARG (exp
, 0) = arg
= builtin_save_expr (arg
);
2662 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2666 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2668 /* Output the entire sequence. */
2669 insns
= get_insns ();
2675 /* If we were unable to expand via the builtin, stop the sequence
2676 (without outputting the insns) and call to the library function
2677 with the stabilized argument list. */
2680 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits of the window, used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states than an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
2758 /* Return the number of multiplications required to calculate
2759 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2760 subroutine of powi_cost. CACHE is an array indicating
2761 which exponents have already been calculated. */
2764 powi_lookup_cost (unsigned HOST_WIDE_INT n
, bool *cache
)
2766 /* If we've already calculated this exponent, then this evaluation
2767 doesn't require any additional multiplications. */
2772 return powi_lookup_cost (n
- powi_table
[n
], cache
)
2773 + powi_lookup_cost (powi_table
[n
], cache
) + 1;
2776 /* Return the number of multiplications required to calculate
2777 powi(x,n) for an arbitrary x, given the exponent N. This
2778 function needs to be kept in sync with expand_powi below. */
2781 powi_cost (HOST_WIDE_INT n
)
2783 bool cache
[POWI_TABLE_SIZE
];
2784 unsigned HOST_WIDE_INT digit
;
2785 unsigned HOST_WIDE_INT val
;
2791 /* Ignore the reciprocal when calculating the cost. */
2792 val
= (n
< 0) ? -n
: n
;
2794 /* Initialize the exponent cache. */
2795 memset (cache
, 0, POWI_TABLE_SIZE
* sizeof (bool));
2800 while (val
>= POWI_TABLE_SIZE
)
2804 digit
= val
& ((1 << POWI_WINDOW_SIZE
) - 1);
2805 result
+= powi_lookup_cost (digit
, cache
)
2806 + POWI_WINDOW_SIZE
+ 1;
2807 val
>>= POWI_WINDOW_SIZE
;
2816 return result
+ powi_lookup_cost (val
, cache
);
2819 /* Recursive subroutine of expand_powi. This function takes the array,
2820 CACHE, of already calculated exponents and an exponent N and returns
2821 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2824 expand_powi_1 (enum machine_mode mode
, unsigned HOST_WIDE_INT n
, rtx
*cache
)
2826 unsigned HOST_WIDE_INT digit
;
2830 if (n
< POWI_TABLE_SIZE
)
2835 target
= gen_reg_rtx (mode
);
2838 op0
= expand_powi_1 (mode
, n
- powi_table
[n
], cache
);
2839 op1
= expand_powi_1 (mode
, powi_table
[n
], cache
);
2843 target
= gen_reg_rtx (mode
);
2844 digit
= n
& ((1 << POWI_WINDOW_SIZE
) - 1);
2845 op0
= expand_powi_1 (mode
, n
- digit
, cache
);
2846 op1
= expand_powi_1 (mode
, digit
, cache
);
2850 target
= gen_reg_rtx (mode
);
2851 op0
= expand_powi_1 (mode
, n
>> 1, cache
);
2855 result
= expand_mult (mode
, op0
, op1
, target
, 0);
2856 if (result
!= target
)
2857 emit_move_insn (target
, result
);
2861 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2862 floating point operand in mode MODE, and N is the exponent. This
2863 function needs to be kept in sync with powi_cost above. */
2866 expand_powi (rtx x
, enum machine_mode mode
, HOST_WIDE_INT n
)
2868 unsigned HOST_WIDE_INT val
;
2869 rtx cache
[POWI_TABLE_SIZE
];
2873 return CONST1_RTX (mode
);
2875 val
= (n
< 0) ? -n
: n
;
2877 memset (cache
, 0, sizeof (cache
));
2880 result
= expand_powi_1 (mode
, (n
< 0) ? -n
: n
, cache
);
2882 /* If the original exponent was negative, reciprocate the result. */
2884 result
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2885 result
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2890 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2891 a normal call should be emitted rather than expanding the function
2892 in-line. EXP is the expression that is a call to the builtin
2893 function; if convenient, the result should be placed in TARGET. */
2896 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
2900 tree type
= TREE_TYPE (exp
);
2901 REAL_VALUE_TYPE cint
, c
, c2
;
2904 enum machine_mode mode
= TYPE_MODE (type
);
2906 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2909 arg0
= CALL_EXPR_ARG (exp
, 0);
2910 arg1
= CALL_EXPR_ARG (exp
, 1);
2912 if (TREE_CODE (arg1
) != REAL_CST
2913 || TREE_OVERFLOW (arg1
))
2914 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2916 /* Handle constant exponents. */
2918 /* For integer valued exponents we can expand to an optimal multiplication
2919 sequence using expand_powi. */
2920 c
= TREE_REAL_CST (arg1
);
2921 n
= real_to_integer (&c
);
2922 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2923 if (real_identical (&c
, &cint
)
2924 && ((n
>= -1 && n
<= 2)
2925 || (flag_unsafe_math_optimizations
2927 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2929 op
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2932 op
= force_reg (mode
, op
);
2933 op
= expand_powi (op
, mode
, n
);
2938 narg0
= builtin_save_expr (arg0
);
2940 /* If the exponent is not integer valued, check if it is half of an integer.
2941 In this case we can expand to sqrt (x) * x**(n/2). */
2942 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
2943 if (fn
!= NULL_TREE
)
2945 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
2946 n
= real_to_integer (&c2
);
2947 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2948 if (real_identical (&c2
, &cint
)
2949 && ((flag_unsafe_math_optimizations
2951 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
2954 tree call_expr
= build_call_expr (fn
, 1, narg0
);
2955 /* Use expand_expr in case the newly built call expression
2956 was folded to a non-call. */
2957 op
= expand_expr (call_expr
, subtarget
, mode
, EXPAND_NORMAL
);
2960 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2961 op2
= force_reg (mode
, op2
);
2962 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
2963 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2964 0, OPTAB_LIB_WIDEN
);
2965 /* If the original exponent was negative, reciprocate the
2968 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2969 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2975 /* Try if the exponent is a third of an integer. In this case
2976 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2977 different from pow (x, 1./3.) due to rounding and behavior
2978 with negative x we need to constrain this transformation to
2979 unsafe math and positive x or finite math. */
2980 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
2982 && flag_unsafe_math_optimizations
2983 && (tree_expr_nonnegative_p (arg0
)
2984 || !HONOR_NANS (mode
)))
2986 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
2987 real_round (&c2
, mode
, &c2
);
2988 n
= real_to_integer (&c2
);
2989 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2990 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
2991 real_convert (&c2
, mode
, &c2
);
2992 if (real_identical (&c2
, &c
)
2994 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
2997 tree call_expr
= build_call_expr (fn
, 1,narg0
);
2998 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
2999 if (abs (n
) % 3 == 2)
3000 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
3001 0, OPTAB_LIB_WIDEN
);
3004 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3005 op2
= force_reg (mode
, op2
);
3006 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
3007 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
3008 0, OPTAB_LIB_WIDEN
);
3009 /* If the original exponent was negative, reciprocate the
3012 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
3013 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3019 /* Fall back to optab expansion. */
3020 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
3023 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3024 a normal call should be emitted rather than expanding the function
3025 in-line. EXP is the expression that is a call to the builtin
3026 function; if convenient, the result should be placed in TARGET. */
3029 expand_builtin_powi (tree exp
, rtx target
, rtx subtarget
)
3033 enum machine_mode mode
;
3034 enum machine_mode mode2
;
3036 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3039 arg0
= CALL_EXPR_ARG (exp
, 0);
3040 arg1
= CALL_EXPR_ARG (exp
, 1);
3041 mode
= TYPE_MODE (TREE_TYPE (exp
));
3043 /* Handle constant power. */
3045 if (TREE_CODE (arg1
) == INTEGER_CST
3046 && !TREE_OVERFLOW (arg1
))
3048 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
3050 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
3051 Otherwise, check the number of multiplications required. */
3052 if ((TREE_INT_CST_HIGH (arg1
) == 0
3053 || TREE_INT_CST_HIGH (arg1
) == -1)
3054 && ((n
>= -1 && n
<= 2)
3056 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3058 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
3059 op0
= force_reg (mode
, op0
);
3060 return expand_powi (op0
, mode
, n
);
3064 /* Emit a libcall to libgcc. */
3066 /* Mode of the 2nd argument must match that of an int. */
3067 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
3069 if (target
== NULL_RTX
)
3070 target
= gen_reg_rtx (mode
);
3072 op0
= expand_expr (arg0
, subtarget
, mode
, EXPAND_NORMAL
);
3073 if (GET_MODE (op0
) != mode
)
3074 op0
= convert_to_mode (mode
, op0
, 0);
3075 op1
= expand_expr (arg1
, NULL_RTX
, mode2
, EXPAND_NORMAL
);
3076 if (GET_MODE (op1
) != mode2
)
3077 op1
= convert_to_mode (mode2
, op1
, 0);
3079 target
= emit_library_call_value (optab_libfunc (powi_optab
, mode
),
3080 target
, LCT_CONST_MAKE_BLOCK
, mode
, 2,
3081 op0
, mode
, op1
, mode2
);
3086 /* Expand expression EXP which is a call to the strlen builtin. Return
3087 NULL_RTX if we failed the caller should emit a normal call, otherwise
3088 try to get the result in TARGET, if convenient. */
3091 expand_builtin_strlen (tree exp
, rtx target
,
3092 enum machine_mode target_mode
)
3094 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3100 tree src
= CALL_EXPR_ARG (exp
, 0);
3101 rtx result
, src_reg
, char_rtx
, before_strlen
;
3102 enum machine_mode insn_mode
= target_mode
, char_mode
;
3103 enum insn_code icode
= CODE_FOR_nothing
;
3106 /* If the length can be computed at compile-time, return it. */
3107 len
= c_strlen (src
, 0);
3109 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3111 /* If the length can be computed at compile-time and is constant
3112 integer, but there are side-effects in src, evaluate
3113 src for side-effects, then return len.
3114 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3115 can be optimized into: i++; x = 3; */
3116 len
= c_strlen (src
, 1);
3117 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3119 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3120 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3123 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3125 /* If SRC is not a pointer type, don't do this operation inline. */
3129 /* Bail out if we can't compute strlen in the right mode. */
3130 while (insn_mode
!= VOIDmode
)
3132 icode
= optab_handler (strlen_optab
, insn_mode
)->insn_code
;
3133 if (icode
!= CODE_FOR_nothing
)
3136 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3138 if (insn_mode
== VOIDmode
)
3141 /* Make a place to write the result of the instruction. */
3145 && GET_MODE (result
) == insn_mode
3146 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3147 result
= gen_reg_rtx (insn_mode
);
3149 /* Make a place to hold the source address. We will not expand
3150 the actual source until we are sure that the expansion will
3151 not fail -- there are trees that cannot be expanded twice. */
3152 src_reg
= gen_reg_rtx (Pmode
);
3154 /* Mark the beginning of the strlen sequence so we can emit the
3155 source operand later. */
3156 before_strlen
= get_last_insn ();
3158 char_rtx
= const0_rtx
;
3159 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3160 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3162 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3164 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3165 char_rtx
, GEN_INT (align
));
3170 /* Now that we are assured of success, expand the source. */
3172 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3174 emit_move_insn (src_reg
, pat
);
3179 emit_insn_after (pat
, before_strlen
);
3181 emit_insn_before (pat
, get_insns ());
3183 /* Return the value in the proper mode for this function. */
3184 if (GET_MODE (result
) == target_mode
)
3186 else if (target
!= 0)
3187 convert_move (target
, result
, 0);
3189 target
= convert_to_mode (target_mode
, result
, 0);
3195 /* Expand a call to the strstr builtin. Return NULL_RTX if we failed the
3196 caller should emit a normal call, otherwise try to get the result
3197 in TARGET, if convenient (and in mode MODE if that's convenient). */
3200 expand_builtin_strstr (tree exp
, rtx target
, enum machine_mode mode
)
3202 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3204 tree type
= TREE_TYPE (exp
);
3205 tree result
= fold_builtin_strstr (CALL_EXPR_ARG (exp
, 0),
3206 CALL_EXPR_ARG (exp
, 1), type
);
3208 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3213 /* Expand a call to the strchr builtin. Return NULL_RTX if we failed the
3214 caller should emit a normal call, otherwise try to get the result
3215 in TARGET, if convenient (and in mode MODE if that's convenient). */
3218 expand_builtin_strchr (tree exp
, rtx target
, enum machine_mode mode
)
3220 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3222 tree type
= TREE_TYPE (exp
);
3223 tree result
= fold_builtin_strchr (CALL_EXPR_ARG (exp
, 0),
3224 CALL_EXPR_ARG (exp
, 1), type
);
3226 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3228 /* FIXME: Should use strchrM optab so that ports can optimize this. */
3233 /* Expand a call to the strrchr builtin. Return NULL_RTX if we failed the
3234 caller should emit a normal call, otherwise try to get the result
3235 in TARGET, if convenient (and in mode MODE if that's convenient). */
3238 expand_builtin_strrchr (tree exp
, rtx target
, enum machine_mode mode
)
3240 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3242 tree type
= TREE_TYPE (exp
);
3243 tree result
= fold_builtin_strrchr (CALL_EXPR_ARG (exp
, 0),
3244 CALL_EXPR_ARG (exp
, 1), type
);
3246 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3251 /* Expand a call to the strpbrk builtin. Return NULL_RTX if we failed the
3252 caller should emit a normal call, otherwise try to get the result
3253 in TARGET, if convenient (and in mode MODE if that's convenient). */
3256 expand_builtin_strpbrk (tree exp
, rtx target
, enum machine_mode mode
)
3258 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3260 tree type
= TREE_TYPE (exp
);
3261 tree result
= fold_builtin_strpbrk (CALL_EXPR_ARG (exp
, 0),
3262 CALL_EXPR_ARG (exp
, 1), type
);
3264 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3269 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3270 bytes from constant string DATA + OFFSET and return it as target
3274 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3275 enum machine_mode mode
)
3277 const char *str
= (const char *) data
;
3279 gcc_assert (offset
>= 0
3280 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3281 <= strlen (str
) + 1));
3283 return c_readstr (str
+ offset
, mode
);
3286 /* Expand a call EXP to the memcpy builtin.
3287 Return NULL_RTX if we failed, the caller should emit a normal call,
3288 otherwise try to get the result in TARGET, if convenient (and in
3289 mode MODE if that's convenient). */
3292 expand_builtin_memcpy (tree exp
, rtx target
, enum machine_mode mode
)
3294 tree fndecl
= get_callee_fndecl (exp
);
3296 if (!validate_arglist (exp
,
3297 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3301 tree dest
= CALL_EXPR_ARG (exp
, 0);
3302 tree src
= CALL_EXPR_ARG (exp
, 1);
3303 tree len
= CALL_EXPR_ARG (exp
, 2);
3304 const char *src_str
;
3305 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3306 unsigned int dest_align
3307 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3308 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3309 tree result
= fold_builtin_memory_op (dest
, src
, len
,
3310 TREE_TYPE (TREE_TYPE (fndecl
)),
3312 HOST_WIDE_INT expected_size
= -1;
3313 unsigned int expected_align
= 0;
3317 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3319 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3321 result
= TREE_OPERAND (result
, 1);
3323 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3326 /* If DEST is not a pointer type, call the normal function. */
3327 if (dest_align
== 0)
3330 /* If either SRC is not a pointer type, don't do this
3331 operation in-line. */
3335 stringop_block_profile (exp
, &expected_align
, &expected_size
);
3336 if (expected_align
< dest_align
)
3337 expected_align
= dest_align
;
3338 dest_mem
= get_memory_rtx (dest
, len
);
3339 set_mem_align (dest_mem
, dest_align
);
3340 len_rtx
= expand_normal (len
);
3341 src_str
= c_getstr (src
);
3343 /* If SRC is a string constant and block move would be done
3344 by pieces, we can avoid loading the string from memory
3345 and only stored the computed constants. */
3347 && GET_CODE (len_rtx
) == CONST_INT
3348 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3349 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3350 (void *) src_str
, dest_align
, false))
3352 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3353 builtin_memcpy_read_str
,
3354 (void *) src_str
, dest_align
, false, 0);
3355 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3356 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3360 src_mem
= get_memory_rtx (src
, len
);
3361 set_mem_align (src_mem
, src_align
);
3363 /* Copy word part most expediently. */
3364 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3365 CALL_EXPR_TAILCALL (exp
)
3366 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3367 expected_align
, expected_size
);
3371 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3372 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3378 /* Expand a call EXP to the mempcpy builtin.
3379 Return NULL_RTX if we failed; the caller should emit a normal call,
3380 otherwise try to get the result in TARGET, if convenient (and in
3381 mode MODE if that's convenient). If ENDP is 0 return the
3382 destination pointer, if ENDP is 1 return the end pointer ala
3383 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3387 expand_builtin_mempcpy(tree exp
, rtx target
, enum machine_mode mode
)
3389 if (!validate_arglist (exp
,
3390 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3394 tree dest
= CALL_EXPR_ARG (exp
, 0);
3395 tree src
= CALL_EXPR_ARG (exp
, 1);
3396 tree len
= CALL_EXPR_ARG (exp
, 2);
3397 return expand_builtin_mempcpy_args (dest
, src
, len
,
3399 target
, mode
, /*endp=*/ 1);
3403 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3404 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3405 so that this can also be called without constructing an actual CALL_EXPR.
3406 TYPE is the return type of the call. The other arguments and return value
3407 are the same as for expand_builtin_mempcpy. */
3410 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
, tree type
,
3411 rtx target
, enum machine_mode mode
, int endp
)
3413 /* If return value is ignored, transform mempcpy into memcpy. */
3414 if (target
== const0_rtx
)
3416 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3421 return expand_expr (build_call_expr (fn
, 3, dest
, src
, len
),
3422 target
, mode
, EXPAND_NORMAL
);
3426 const char *src_str
;
3427 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3428 unsigned int dest_align
3429 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3430 rtx dest_mem
, src_mem
, len_rtx
;
3431 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, false, endp
);
3435 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3437 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3439 result
= TREE_OPERAND (result
, 1);
3441 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3444 /* If either SRC or DEST is not a pointer type, don't do this
3445 operation in-line. */
3446 if (dest_align
== 0 || src_align
== 0)
3449 /* If LEN is not constant, call the normal function. */
3450 if (! host_integerp (len
, 1))
3453 len_rtx
= expand_normal (len
);
3454 src_str
= c_getstr (src
);
3456 /* If SRC is a string constant and block move would be done
3457 by pieces, we can avoid loading the string from memory
3458 and only stored the computed constants. */
3460 && GET_CODE (len_rtx
) == CONST_INT
3461 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3462 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3463 (void *) src_str
, dest_align
, false))
3465 dest_mem
= get_memory_rtx (dest
, len
);
3466 set_mem_align (dest_mem
, dest_align
);
3467 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3468 builtin_memcpy_read_str
,
3469 (void *) src_str
, dest_align
,
3471 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3472 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3476 if (GET_CODE (len_rtx
) == CONST_INT
3477 && can_move_by_pieces (INTVAL (len_rtx
),
3478 MIN (dest_align
, src_align
)))
3480 dest_mem
= get_memory_rtx (dest
, len
);
3481 set_mem_align (dest_mem
, dest_align
);
3482 src_mem
= get_memory_rtx (src
, len
);
3483 set_mem_align (src_mem
, src_align
);
3484 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3485 MIN (dest_align
, src_align
), endp
);
3486 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3487 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3495 /* Expand expression EXP, which is a call to the memmove builtin. Return
3496 NULL_RTX if we failed; the caller should emit a normal call. */
3499 expand_builtin_memmove (tree exp
, rtx target
, enum machine_mode mode
, int ignore
)
3501 if (!validate_arglist (exp
,
3502 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3506 tree dest
= CALL_EXPR_ARG (exp
, 0);
3507 tree src
= CALL_EXPR_ARG (exp
, 1);
3508 tree len
= CALL_EXPR_ARG (exp
, 2);
3509 return expand_builtin_memmove_args (dest
, src
, len
, TREE_TYPE (exp
),
3510 target
, mode
, ignore
);
3514 /* Helper function to do the actual work for expand_builtin_memmove. The
3515 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3516 so that this can also be called without constructing an actual CALL_EXPR.
3517 TYPE is the return type of the call. The other arguments and return value
3518 are the same as for expand_builtin_memmove. */
3521 expand_builtin_memmove_args (tree dest
, tree src
, tree len
,
3522 tree type
, rtx target
, enum machine_mode mode
,
3525 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, ignore
, /*endp=*/3);
3529 STRIP_TYPE_NOPS (result
);
3530 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3532 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3534 result
= TREE_OPERAND (result
, 1);
3536 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3539 /* Otherwise, call the normal function. */
3543 /* Expand expression EXP, which is a call to the bcopy builtin. Return
3544 NULL_RTX if we failed the caller should emit a normal call. */
3547 expand_builtin_bcopy (tree exp
, int ignore
)
3549 tree type
= TREE_TYPE (exp
);
3550 tree src
, dest
, size
;
3552 if (!validate_arglist (exp
,
3553 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3556 src
= CALL_EXPR_ARG (exp
, 0);
3557 dest
= CALL_EXPR_ARG (exp
, 1);
3558 size
= CALL_EXPR_ARG (exp
, 2);
3560 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3561 This is done this way so that if it isn't expanded inline, we fall
3562 back to calling bcopy instead of memmove. */
3563 return expand_builtin_memmove_args (dest
, src
,
3564 fold_convert (sizetype
, size
),
3565 type
, const0_rtx
, VOIDmode
,
3570 # define HAVE_movstr 0
3571 # define CODE_FOR_movstr CODE_FOR_nothing
3574 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3575 we failed, the caller should emit a normal call, otherwise try to
3576 get the result in TARGET, if convenient. If ENDP is 0 return the
3577 destination pointer, if ENDP is 1 return the end pointer ala
3578 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3582 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3588 const struct insn_data
* data
;
3593 dest_mem
= get_memory_rtx (dest
, NULL
);
3594 src_mem
= get_memory_rtx (src
, NULL
);
3597 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3598 dest_mem
= replace_equiv_address (dest_mem
, target
);
3599 end
= gen_reg_rtx (Pmode
);
3603 if (target
== 0 || target
== const0_rtx
)
3605 end
= gen_reg_rtx (Pmode
);
3613 data
= insn_data
+ CODE_FOR_movstr
;
3615 if (data
->operand
[0].mode
!= VOIDmode
)
3616 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3618 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3624 /* movstr is supposed to set end to the address of the NUL
3625 terminator. If the caller requested a mempcpy-like return value,
3627 if (endp
== 1 && target
!= const0_rtx
)
3629 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3630 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3636 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3637 NULL_RTX if we failed the caller should emit a normal call, otherwise
3638 try to get the result in TARGET, if convenient (and in mode MODE if that's
3642 expand_builtin_strcpy (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
3644 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3646 tree dest
= CALL_EXPR_ARG (exp
, 0);
3647 tree src
= CALL_EXPR_ARG (exp
, 1);
3648 return expand_builtin_strcpy_args (fndecl
, dest
, src
, target
, mode
);
3653 /* Helper function to do the actual work for expand_builtin_strcpy. The
3654 arguments to the builtin_strcpy call DEST and SRC are broken out
3655 so that this can also be called without constructing an actual CALL_EXPR.
3656 The other arguments and return value are the same as for
3657 expand_builtin_strcpy. */
3660 expand_builtin_strcpy_args (tree fndecl
, tree dest
, tree src
,
3661 rtx target
, enum machine_mode mode
)
3663 tree result
= fold_builtin_strcpy (fndecl
, dest
, src
, 0);
3665 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3666 return expand_movstr (dest
, src
, target
, /*endp=*/0);
3670 /* Expand a call EXP to the stpcpy builtin.
3671 Return NULL_RTX if we failed the caller should emit a normal call,
3672 otherwise try to get the result in TARGET, if convenient (and in
3673 mode MODE if that's convenient). */
3676 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3680 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3683 dst
= CALL_EXPR_ARG (exp
, 0);
3684 src
= CALL_EXPR_ARG (exp
, 1);
3686 /* If return value is ignored, transform stpcpy into strcpy. */
3687 if (target
== const0_rtx
)
3689 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3693 return expand_expr (build_call_expr (fn
, 2, dst
, src
),
3694 target
, mode
, EXPAND_NORMAL
);
3701 /* Ensure we get an actual string whose length can be evaluated at
3702 compile-time, not an expression containing a string. This is
3703 because the latter will potentially produce pessimized code
3704 when used to produce the return value. */
3705 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3706 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3708 lenp1
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
3709 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
, TREE_TYPE (exp
),
3710 target
, mode
, /*endp=*/2);
3715 if (TREE_CODE (len
) == INTEGER_CST
)
3717 rtx len_rtx
= expand_normal (len
);
3719 if (GET_CODE (len_rtx
) == CONST_INT
)
3721 ret
= expand_builtin_strcpy_args (get_callee_fndecl (exp
),
3722 dst
, src
, target
, mode
);
3728 if (mode
!= VOIDmode
)
3729 target
= gen_reg_rtx (mode
);
3731 target
= gen_reg_rtx (GET_MODE (ret
));
3733 if (GET_MODE (target
) != GET_MODE (ret
))
3734 ret
= gen_lowpart (GET_MODE (target
), ret
);
3736 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3737 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3745 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3749 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3750 bytes from constant string DATA + OFFSET and return it as target
3754 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3755 enum machine_mode mode
)
3757 const char *str
= (const char *) data
;
3759 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3762 return c_readstr (str
+ offset
, mode
);
3765 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3766 NULL_RTX if we failed the caller should emit a normal call. */
3769 expand_builtin_strncpy (tree exp
, rtx target
, enum machine_mode mode
)
3771 tree fndecl
= get_callee_fndecl (exp
);
3773 if (validate_arglist (exp
,
3774 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3776 tree dest
= CALL_EXPR_ARG (exp
, 0);
3777 tree src
= CALL_EXPR_ARG (exp
, 1);
3778 tree len
= CALL_EXPR_ARG (exp
, 2);
3779 tree slen
= c_strlen (src
, 1);
3780 tree result
= fold_builtin_strncpy (fndecl
, dest
, src
, len
, slen
);
3784 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3786 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3788 result
= TREE_OPERAND (result
, 1);
3790 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3793 /* We must be passed a constant len and src parameter. */
3794 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3797 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
3799 /* We're required to pad with trailing zeros if the requested
3800 len is greater than strlen(s2)+1. In that case try to
3801 use store_by_pieces, if it fails, punt. */
3802 if (tree_int_cst_lt (slen
, len
))
3804 unsigned int dest_align
3805 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3806 const char *p
= c_getstr (src
);
3809 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3810 || !can_store_by_pieces (tree_low_cst (len
, 1),
3811 builtin_strncpy_read_str
,
3812 (void *) p
, dest_align
, false))
3815 dest_mem
= get_memory_rtx (dest
, len
);
3816 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3817 builtin_strncpy_read_str
,
3818 (void *) p
, dest_align
, false, 0);
3819 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3820 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3827 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3828 bytes from constant string DATA + OFFSET and return it as target
3832 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3833 enum machine_mode mode
)
3835 const char *c
= (const char *) data
;
3836 char *p
= alloca (GET_MODE_SIZE (mode
));
3838 memset (p
, *c
, GET_MODE_SIZE (mode
));
3840 return c_readstr (p
, mode
);
3843 /* Callback routine for store_by_pieces. Return the RTL of a register
3844 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3845 char value given in the RTL register data. For example, if mode is
3846 4 bytes wide, return the RTL for 0x01010101*data. */
3849 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3850 enum machine_mode mode
)
3856 size
= GET_MODE_SIZE (mode
);
3861 memset (p
, 1, size
);
3862 coeff
= c_readstr (p
, mode
);
3864 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3865 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3866 return force_reg (mode
, target
);
3869 /* Expand expression EXP, which is a call to the memset builtin. Return
3870 NULL_RTX if we failed the caller should emit a normal call, otherwise
3871 try to get the result in TARGET, if convenient (and in mode MODE if that's
3875 expand_builtin_memset (tree exp
, rtx target
, enum machine_mode mode
)
3877 if (!validate_arglist (exp
,
3878 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3882 tree dest
= CALL_EXPR_ARG (exp
, 0);
3883 tree val
= CALL_EXPR_ARG (exp
, 1);
3884 tree len
= CALL_EXPR_ARG (exp
, 2);
3885 return expand_builtin_memset_args (dest
, val
, len
, target
, mode
, exp
);
3889 /* Helper function to do the actual work for expand_builtin_memset. The
3890 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3891 so that this can also be called without constructing an actual CALL_EXPR.
3892 The other arguments and return value are the same as for
3893 expand_builtin_memset. */
3896 expand_builtin_memset_args (tree dest
, tree val
, tree len
,
3897 rtx target
, enum machine_mode mode
, tree orig_exp
)
3900 enum built_in_function fcode
;
3902 unsigned int dest_align
;
3903 rtx dest_mem
, dest_addr
, len_rtx
;
3904 HOST_WIDE_INT expected_size
= -1;
3905 unsigned int expected_align
= 0;
3907 dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3909 /* If DEST is not a pointer type, don't do this operation in-line. */
3910 if (dest_align
== 0)
3913 stringop_block_profile (orig_exp
, &expected_align
, &expected_size
);
3914 if (expected_align
< dest_align
)
3915 expected_align
= dest_align
;
3917 /* If the LEN parameter is zero, return DEST. */
3918 if (integer_zerop (len
))
3920 /* Evaluate and ignore VAL in case it has side-effects. */
3921 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3922 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3925 /* Stabilize the arguments in case we fail. */
3926 dest
= builtin_save_expr (dest
);
3927 val
= builtin_save_expr (val
);
3928 len
= builtin_save_expr (len
);
3930 len_rtx
= expand_normal (len
);
3931 dest_mem
= get_memory_rtx (dest
, len
);
3933 if (TREE_CODE (val
) != INTEGER_CST
)
3937 val_rtx
= expand_normal (val
);
3938 val_rtx
= convert_to_mode (TYPE_MODE (unsigned_char_type_node
),
3941 /* Assume that we can memset by pieces if we can store
3942 * the coefficients by pieces (in the required modes).
3943 * We can't pass builtin_memset_gen_str as that emits RTL. */
3945 if (host_integerp (len
, 1)
3946 && can_store_by_pieces (tree_low_cst (len
, 1),
3947 builtin_memset_read_str
, &c
, dest_align
,
3950 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3952 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3953 builtin_memset_gen_str
, val_rtx
, dest_align
,
3956 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3957 dest_align
, expected_align
,
3961 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3962 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3966 if (target_char_cast (val
, &c
))
3971 if (host_integerp (len
, 1)
3972 && can_store_by_pieces (tree_low_cst (len
, 1),
3973 builtin_memset_read_str
, &c
, dest_align
,
3975 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3976 builtin_memset_read_str
, &c
, dest_align
, true, 0);
3977 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, GEN_INT (c
),
3978 dest_align
, expected_align
,
3982 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3983 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3987 set_mem_align (dest_mem
, dest_align
);
3988 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3989 CALL_EXPR_TAILCALL (orig_exp
)
3990 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3991 expected_align
, expected_size
);
3995 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3996 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
4002 fndecl
= get_callee_fndecl (orig_exp
);
4003 fcode
= DECL_FUNCTION_CODE (fndecl
);
4004 if (fcode
== BUILT_IN_MEMSET
)
4005 fn
= build_call_expr (fndecl
, 3, dest
, val
, len
);
4006 else if (fcode
== BUILT_IN_BZERO
)
4007 fn
= build_call_expr (fndecl
, 2, dest
, len
);
4010 if (TREE_CODE (fn
) == CALL_EXPR
)
4011 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
4012 return expand_call (fn
, target
, target
== const0_rtx
);
4015 /* Expand expression EXP, which is a call to the bzero builtin. Return
4016 NULL_RTX if we failed the caller should emit a normal call. */
4019 expand_builtin_bzero (tree exp
)
4023 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4026 dest
= CALL_EXPR_ARG (exp
, 0);
4027 size
= CALL_EXPR_ARG (exp
, 1);
4029 /* New argument list transforming bzero(ptr x, int y) to
4030 memset(ptr x, int 0, size_t y). This is done this way
4031 so that if it isn't expanded inline, we fallback to
4032 calling bzero instead of memset. */
4034 return expand_builtin_memset_args (dest
, integer_zero_node
,
4035 fold_convert (sizetype
, size
),
4036 const0_rtx
, VOIDmode
, exp
);
4039 /* Expand a call to the memchr builtin. Return NULL_RTX if we failed the
4040 caller should emit a normal call, otherwise try to get the result
4041 in TARGET, if convenient (and in mode MODE if that's convenient). */
4044 expand_builtin_memchr (tree exp
, rtx target
, enum machine_mode mode
)
4046 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
,
4047 INTEGER_TYPE
, VOID_TYPE
))
4049 tree type
= TREE_TYPE (exp
);
4050 tree result
= fold_builtin_memchr (CALL_EXPR_ARG (exp
, 0),
4051 CALL_EXPR_ARG (exp
, 1),
4052 CALL_EXPR_ARG (exp
, 2), type
);
4054 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4059 /* Expand expression EXP, which is a call to the memcmp built-in function.
4060 Return NULL_RTX if we failed and the
4061 caller should emit a normal call, otherwise try to get the result in
4062 TARGET, if convenient (and in mode MODE, if that's convenient). */
4065 expand_builtin_memcmp (tree exp
, rtx target
, enum machine_mode mode
)
4067 if (!validate_arglist (exp
,
4068 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4072 tree result
= fold_builtin_memcmp (CALL_EXPR_ARG (exp
, 0),
4073 CALL_EXPR_ARG (exp
, 1),
4074 CALL_EXPR_ARG (exp
, 2));
4076 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4079 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4081 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4084 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4085 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4086 tree len
= CALL_EXPR_ARG (exp
, 2);
4089 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4091 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4092 enum machine_mode insn_mode
;
4094 #ifdef HAVE_cmpmemsi
4096 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
4099 #ifdef HAVE_cmpstrnsi
4101 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4106 /* If we don't have POINTER_TYPE, call the function. */
4107 if (arg1_align
== 0 || arg2_align
== 0)
4110 /* Make a place to write the result of the instruction. */
4113 && REG_P (result
) && GET_MODE (result
) == insn_mode
4114 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4115 result
= gen_reg_rtx (insn_mode
);
4117 arg1_rtx
= get_memory_rtx (arg1
, len
);
4118 arg2_rtx
= get_memory_rtx (arg2
, len
);
4119 arg3_rtx
= expand_normal (len
);
4121 /* Set MEM_SIZE as appropriate. */
4122 if (GET_CODE (arg3_rtx
) == CONST_INT
)
4124 set_mem_size (arg1_rtx
, arg3_rtx
);
4125 set_mem_size (arg2_rtx
, arg3_rtx
);
4128 #ifdef HAVE_cmpmemsi
4130 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4131 GEN_INT (MIN (arg1_align
, arg2_align
)));
4134 #ifdef HAVE_cmpstrnsi
4136 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4137 GEN_INT (MIN (arg1_align
, arg2_align
)));
4145 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE_MAKE_BLOCK
,
4146 TYPE_MODE (integer_type_node
), 3,
4147 XEXP (arg1_rtx
, 0), Pmode
,
4148 XEXP (arg2_rtx
, 0), Pmode
,
4149 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4150 TYPE_UNSIGNED (sizetype
)),
4151 TYPE_MODE (sizetype
));
4153 /* Return the value in the proper mode for this function. */
4154 mode
= TYPE_MODE (TREE_TYPE (exp
));
4155 if (GET_MODE (result
) == mode
)
4157 else if (target
!= 0)
4159 convert_move (target
, result
, 0);
4163 return convert_to_mode (mode
, result
, 0);
/* NOTE(review): this chunk is line-mangled by extraction -- statements are
   split across physical lines, the embedded upstream line numbers jump
   (e.g. 4177 -> 4181), and several original lines (braces, else arms,
   returns, some declarations) appear to have been dropped.  Verify against
   upstream GCC before editing; lines below are preserved verbatim.  */
4170 /* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
4171 if we failed the caller should emit a normal call, otherwise try to get
4172 the result in TARGET, if convenient.  */
4175 expand_builtin_strcmp (tree exp
, rtx target
, enum machine_mode mode
)
4177 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
/* First try to fold the call away entirely (e.g. both args constant).  */
4181 tree result
= fold_builtin_strcmp (CALL_EXPR_ARG (exp
, 0),
4182 CALL_EXPR_ARG (exp
, 1));
4184 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* Inline path: only worthwhile when the target provides a cmpstr or
   cmpstrn insn pattern.  */
4187 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4188 if (cmpstr_optab
[SImode
] != CODE_FOR_nothing
4189 || cmpstrn_optab
[SImode
] != CODE_FOR_nothing
)
4191 rtx arg1_rtx
, arg2_rtx
;
4192 rtx result
, insn
= NULL_RTX
;
4194 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4195 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4198 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4200 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4202 /* If we don't have POINTER_TYPE, call the function. */
4203 if (arg1_align
== 0 || arg2_align
== 0)
4206 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4207 arg1
= builtin_save_expr (arg1
);
4208 arg2
= builtin_save_expr (arg2
);
4210 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4211 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4213 #ifdef HAVE_cmpstrsi
4214 /* Try to call cmpstrsi. */
4217 enum machine_mode insn_mode
4218 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4220 /* Make a place to write the result of the instruction. */
4223 && REG_P (result
) && GET_MODE (result
) == insn_mode
4224 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4225 result
= gen_reg_rtx (insn_mode
);
4227 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4228 GEN_INT (MIN (arg1_align
, arg2_align
)));
4231 #ifdef HAVE_cmpstrnsi
4232 /* Try to determine at least one length and call cmpstrnsi. */
4233 if (!insn
&& HAVE_cmpstrnsi
)
4238 enum machine_mode insn_mode
4239 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4240 tree len1
= c_strlen (arg1
, 1);
4241 tree len2
= c_strlen (arg2
, 1);
/* Lengths must cover the terminating NUL, hence the +1 below.  */
4244 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4246 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4248 /* If we don't have a constant length for the first, use the length
4249 of the second, if we know it. We don't require a constant for
4250 this case; some cost analysis could be done if both are available
4251 but neither is constant. For now, assume they're equally cheap,
4252 unless one has side effects. If both strings have constant lengths,
4259 else if (TREE_SIDE_EFFECTS (len1
))
4261 else if (TREE_SIDE_EFFECTS (len2
))
4263 else if (TREE_CODE (len1
) != INTEGER_CST
)
4265 else if (TREE_CODE (len2
) != INTEGER_CST
)
4267 else if (tree_int_cst_lt (len1
, len2
))
4272 /* If both arguments have side effects, we cannot optimize. */
4273 if (!len
|| TREE_SIDE_EFFECTS (len
))
4276 arg3_rtx
= expand_normal (len
);
4278 /* Make a place to write the result of the instruction. */
4281 && REG_P (result
) && GET_MODE (result
) == insn_mode
4282 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4283 result
= gen_reg_rtx (insn_mode
);
4285 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4286 GEN_INT (MIN (arg1_align
, arg2_align
)));
4294 /* Return the value in the proper mode for this function. */
4295 mode
= TYPE_MODE (TREE_TYPE (exp
))
4296 if (GET_MODE (result
) == mode
)
4299 return convert_to_mode (mode
, result
, 0);
4300 convert_move (target
, result
, 0);
4304 /* Expand the library call ourselves using a stabilized argument
4305 list to avoid re-evaluating the function's arguments twice. */
4306 #ifdef HAVE_cmpstrnsi
4309 fndecl
= get_callee_fndecl (exp
);
4310 fn
= build_call_expr (fndecl
, 2, arg1
, arg2
);
4311 if (TREE_CODE (fn
) == CALL_EXPR
)
/* Preserve the tail-call flag on the replacement call.  */
4312 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4313 return expand_call (fn
, target
, target
== const0_rtx
);
/* NOTE(review): line-mangled by extraction; embedded upstream line numbers
   jump (e.g. 4327 -> 4331), so braces, else arms and return statements are
   missing from this view.  Lines below preserved verbatim.  */
4319 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4320 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4321 the result in TARGET, if convenient. */
4324 expand_builtin_strncmp (tree exp
, rtx target
, enum machine_mode mode
)
4326 if (!validate_arglist (exp
,
4327 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
/* First try constant folding of strncmp.  */
4331 tree result
= fold_builtin_strncmp (CALL_EXPR_ARG (exp
, 0),
4332 CALL_EXPR_ARG (exp
, 1),
4333 CALL_EXPR_ARG (exp
, 2));
4335 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4338 /* If c_strlen can determine an expression for one of the string
4339 lengths, and it doesn't have side effects, then emit cmpstrnsi
4340 using length MIN(strlen(string)+1, arg3). */
4341 #ifdef HAVE_cmpstrnsi
4344 tree len
, len1
, len2
;
4345 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4348 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4349 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4350 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4353 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4355 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4356 enum machine_mode insn_mode
4357 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4359 len1
= c_strlen (arg1
, 1);
4360 len2
= c_strlen (arg2
, 1);
/* Include the terminating NUL in each known length.  */
4363 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4365 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4367 /* If we don't have a constant length for the first, use the length
4368 of the second, if we know it. We don't require a constant for
4369 this case; some cost analysis could be done if both are available
4370 but neither is constant. For now, assume they're equally cheap,
4371 unless one has side effects. If both strings have constant lengths,
4378 else if (TREE_SIDE_EFFECTS (len1
))
4380 else if (TREE_SIDE_EFFECTS (len2
))
4382 else if (TREE_CODE (len1
) != INTEGER_CST
)
4384 else if (TREE_CODE (len2
) != INTEGER_CST
)
4386 else if (tree_int_cst_lt (len1
, len2
))
4391 /* If both arguments have side effects, we cannot optimize. */
4392 if (!len
|| TREE_SIDE_EFFECTS (len
))
4395 /* The actual new length parameter is MIN(len,arg3). */
4396 len
= fold_build2 (MIN_EXPR
, TREE_TYPE (len
), len
,
4397 fold_convert (TREE_TYPE (len
), arg3
));
4399 /* If we don't have POINTER_TYPE, call the function. */
4400 if (arg1_align
== 0 || arg2_align
== 0)
4403 /* Make a place to write the result of the instruction. */
4406 && REG_P (result
) && GET_MODE (result
) == insn_mode
4407 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4408 result
= gen_reg_rtx (insn_mode
);
4410 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4411 arg1
= builtin_save_expr (arg1
);
4412 arg2
= builtin_save_expr (arg2
);
4413 len
= builtin_save_expr (len
);
4415 arg1_rtx
= get_memory_rtx (arg1
, len
);
4416 arg2_rtx
= get_memory_rtx (arg2
, len
);
4417 arg3_rtx
= expand_normal (len
);
4418 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4419 GEN_INT (MIN (arg1_align
, arg2_align
)));
4424 /* Return the value in the proper mode for this function. */
4425 mode
= TYPE_MODE (TREE_TYPE (exp
));
4426 if (GET_MODE (result
) == mode
)
4429 return convert_to_mode (mode
, result
, 0);
4430 convert_move (target
, result
, 0);
4434 /* Expand the library call ourselves using a stabilized argument
4435 list to avoid re-evaluating the function's arguments twice. */
4436 fndecl
= get_callee_fndecl (exp
);
4437 fn
= build_call_expr (fndecl
, 3, arg1
, arg2
, len
);
4438 if (TREE_CODE (fn
) == CALL_EXPR
)
4439 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4440 return expand_call (fn
, target
, target
== const0_rtx
);
/* NOTE(review): line-mangled; the start_sequence/emit_insn counterparts of
   the visible end_sequence/get_insns calls are missing from this extraction
   (upstream numbers jump 4487 -> 4491).  Verify against upstream GCC.  */
4446 /* Expand expression EXP, which is a call to the strcat builtin.
4447 Return NULL_RTX if we failed the caller should emit a normal call,
4448 otherwise try to get the result in TARGET, if convenient. */
4451 expand_builtin_strcat (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
4453 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4457 tree dst
= CALL_EXPR_ARG (exp
, 0);
4458 tree src
= CALL_EXPR_ARG (exp
, 1);
4459 const char *p
= c_getstr (src
);
4461 /* If the string length is zero, return the dst parameter. */
4462 if (p
&& *p
== '\0')
4463 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4467 /* See if we can store by pieces into (dst + strlen(dst)). */
4468 tree newsrc
, newdst
,
4469 strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
4472 /* Stabilize the argument list. */
4473 newsrc
= builtin_save_expr (src
);
4474 dst
= builtin_save_expr (dst
);
4478 /* Create strlen (dst). */
4479 newdst
= build_call_expr (strlen_fn
, 1, dst
);
4480 /* Create (dst p+ strlen (dst)). */
4482 newdst
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dst
), dst
, newdst
);
4483 newdst
= builtin_save_expr (newdst
);
/* Delegate the actual copy to the strcpy expander; bail out of the
   insn sequence if it cannot expand inline.  */
4485 if (!expand_builtin_strcpy_args (fndecl
, newdst
, newsrc
, target
, mode
))
4487 end_sequence (); /* Stop sequence. */
4491 /* Output the entire sequence. */
4492 insns
= get_insns ();
4496 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
/* NOTE(review): line-mangled; the trailing "return NULL_RTX;" path implied
   by the header comment is not visible in this extraction.  */
4503 /* Expand expression EXP, which is a call to the strncat builtin.
4504 Return NULL_RTX if we failed the caller should emit a normal call,
4505 otherwise try to get the result in TARGET, if convenient. */
4508 expand_builtin_strncat (tree exp
, rtx target
, enum machine_mode mode
)
4510 if (validate_arglist (exp
,
4511 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
/* Only folding is attempted; there is no inline expansion path here.  */
4513 tree result
= fold_builtin_strncat (CALL_EXPR_ARG (exp
, 0),
4514 CALL_EXPR_ARG (exp
, 1),
4515 CALL_EXPR_ARG (exp
, 2));
4517 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* NOTE(review): line-mangled extraction; fall-through/return lines are not
   visible.  Lines below preserved verbatim.  */
4522 /* Expand expression EXP, which is a call to the strspn builtin.
4523 Return NULL_RTX if we failed the caller should emit a normal call,
4524 otherwise try to get the result in TARGET, if convenient. */
4527 expand_builtin_strspn (tree exp
, rtx target
, enum machine_mode mode
)
4529 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
/* Fold-only expansion, mirroring expand_builtin_strcspn below.  */
4531 tree result
= fold_builtin_strspn (CALL_EXPR_ARG (exp
, 0),
4532 CALL_EXPR_ARG (exp
, 1));
4534 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* NOTE(review): line-mangled extraction; fall-through/return lines are not
   visible.  Lines below preserved verbatim.  */
4539 /* Expand expression EXP, which is a call to the strcspn builtin.
4540 Return NULL_RTX if we failed the caller should emit a normal call,
4541 otherwise try to get the result in TARGET, if convenient. */
4544 expand_builtin_strcspn (tree exp
, rtx target
, enum machine_mode mode
)
4546 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
/* Fold-only expansion, mirroring expand_builtin_strspn above.  */
4548 tree result
= fold_builtin_strcspn (CALL_EXPR_ARG (exp
, 0),
4549 CALL_EXPR_ARG (exp
, 1));
4551 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* NOTE(review): line-mangled; the start_sequence () / seq = get_insns () /
   end_sequence () lines implied by push_topmost_sequence/emit_insn_after
   below are missing from this extraction, as is the final return.  */
4556 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4557 if that's convenient. */
4560 expand_builtin_saveregs (void)
4564 /* Don't do __builtin_saveregs more than once in a function.
4565 Save the result of the first call and reuse it. */
4566 if (saveregs_value
!= 0)
4567 return saveregs_value
;
4569 /* When this function is called, it means that registers must be
4570 saved on entry to this function. So we migrate the call to the
4571 first insn of this function. */
4575 /* Do whatever the machine needs done in this case. */
4576 val
= targetm
.calls
.expand_builtin_saveregs ();
4581 saveregs_value
= val
;
4583 /* Put the insns after the NOTE that starts the function. If this
4584 is inside a start_sequence, make the outer-level insn chain current, so
4585 the code is placed at the start of the function. */
4586 push_topmost_sequence ();
4587 emit_insn_after (seq
, entry_of_function ());
4588 pop_topmost_sequence ();
4593 /* __builtin_args_info (N) returns word N of the arg space info
4594 for the current function. The number and meanings of words
4595 is controlled by the definition of CUMULATIVE_ARGS. */
4598 expand_builtin_args_info (tree exp
)
4600 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
4601 int *word_ptr
= (int *) ¤t_function_args_info
;
4603 gcc_assert (sizeof (CUMULATIVE_ARGS
) % sizeof (int) == 0);
4605 if (call_expr_nargs (exp
) != 0)
4607 if (!host_integerp (CALL_EXPR_ARG (exp
, 0), 0))
4608 error ("argument of %<__builtin_args_info%> must be constant");
4611 HOST_WIDE_INT wordnum
= tree_low_cst (CALL_EXPR_ARG (exp
, 0), 0);
4613 if (wordnum
< 0 || wordnum
>= nwords
)
4614 error ("argument of %<__builtin_args_info%> out of range");
4616 return GEN_INT (word_ptr
[wordnum
]);
4620 error ("missing argument in %<__builtin_args_info%>");
4625 /* Expand a call to __builtin_next_arg. */
4628 expand_builtin_next_arg (void)
4630 /* Checking arguments is already done in fold_builtin_next_arg
4631 that must be called before this function. */
4632 return expand_binop (ptr_mode
, add_optab
,
4633 current_function_internal_arg_pointer
,
4634 current_function_arg_offset_rtx
,
4635 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
/* NOTE(review): line-mangled; several branch/return lines are missing from
   this extraction (upstream numbers jump 4656 -> 4665).  Lines below are
   preserved verbatim.  */
4638 /* Make it easier for the backends by protecting the valist argument
4639 from multiple evaluations. */
4642 stabilize_va_list (tree valist
, int needs_lvalue
)
4644 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4646 if (TREE_SIDE_EFFECTS (valist
))
4647 valist
= save_expr (valist
);
4649 /* For this case, the backends will be expecting a pointer to
4650 TREE_TYPE (va_list_type_node), but it's possible we've
4651 actually been given an array (an actual va_list_type_node).
4653 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4655 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4656 valist
= build_fold_addr_expr_with_type (valist
, p1
);
/* Non-array va_list: take the address when an lvalue is required.  */
4665 if (! TREE_SIDE_EFFECTS (valist
))
4668 pt
= build_pointer_type (va_list_type_node
);
4669 valist
= fold_build1 (ADDR_EXPR
, pt
, valist
);
4670 TREE_SIDE_EFFECTS (valist
) = 1;
4673 if (TREE_SIDE_EFFECTS (valist
))
4674 valist
= save_expr (valist
);
4675 valist
= build_fold_indirect_ref (valist
);
4681 /* The "standard" definition of va_list is void*. */
4684 std_build_builtin_va_list (void)
4686 return ptr_type_node
;
4689 /* The "standard" implementation of va_start: just assign `nextarg' to
4693 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4695 rtx va_r
= expand_expr (valist
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4696 convert_move (va_r
, nextarg
, 0);
/* NOTE(review): line-mangled; the declarations of `nextarg'/`valist', the
   error-path return and the final return are missing from this extraction
   (upstream numbers jump 4709 -> 4713, 4722 -> 4728).  */
4699 /* Expand EXP, a call to __builtin_va_start. */
4702 expand_builtin_va_start (tree exp
)
4707 if (call_expr_nargs (exp
) < 2)
4709 error ("too few arguments to function %<va_start%>");
4713 if (fold_builtin_next_arg (exp
, true))
4716 nextarg
= expand_builtin_next_arg ();
4717 valist
= stabilize_va_list (CALL_EXPR_ARG (exp
, 0), 1);
/* Targets may supply their own va_start expansion via the macro;
   otherwise fall back to the standard implementation.  */
4719 #ifdef EXPAND_BUILTIN_VA_START
4720 EXPAND_BUILTIN_VA_START (valist
, nextarg
);
4722 std_expand_builtin_va_start (valist
, nextarg
);
/* NOTE(review): line-mangled; declarations (e.g. `indirect'), gcc_unreachable
   for ARGS_GROW_DOWNWARD, and the assignment initializing `addr' are among
   lines dropped by the extraction (upstream numbers jump 4741 -> 4745,
   4770 -> 4775, 4790 -> 4794).  Lines below preserved verbatim.  */
4728 /* The "standard" implementation of va_arg: read the value from the
4729 current (padded) address and increment by the (padded) size. */
4732 std_gimplify_va_arg_expr (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
4734 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4735 unsigned HOST_WIDE_INT align
, boundary
;
4738 #ifdef ARGS_GROW_DOWNWARD
4739 /* All of the alignment and movement below is for args-grow-up machines.
4740 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4741 implement their own specialized gimplify_va_arg_expr routines. */
/* Pass-by-reference arguments are fetched as a pointer to the value.  */
4745 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4747 type
= build_pointer_type (type
);
4749 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4750 boundary
= FUNCTION_ARG_BOUNDARY (TYPE_MODE (type
), type
) / BITS_PER_UNIT
;
4752 /* Hoist the valist value into a temporary for the moment. */
4753 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4755 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4756 requires greater alignment, we must perform dynamic alignment. */
4757 if (boundary
> align
4758 && !integer_zerop (TYPE_SIZE (type
)))
4760 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4761 fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
),
4762 valist_tmp
, size_int (boundary
- 1)));
4763 gimplify_and_add (t
, pre_p
);
4765 t
= fold_convert (sizetype
, valist_tmp
);
4766 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4767 fold_convert (TREE_TYPE (valist
),
4768 fold_build2 (BIT_AND_EXPR
, sizetype
, t
,
4769 size_int (-boundary
))));
4770 gimplify_and_add (t
, pre_p
);
4775 /* If the actual alignment is less than the alignment of the type,
4776 adjust the type accordingly so that we don't assume strict alignment
4777 when deferencing the pointer. */
4778 boundary
*= BITS_PER_UNIT
;
4779 if (boundary
< TYPE_ALIGN (type
))
4781 type
= build_variant_type_copy (type
);
4782 TYPE_ALIGN (type
) = boundary
;
4785 /* Compute the rounded size of the type. */
4786 type_size
= size_in_bytes (type
);
4787 rounded_size
= round_up (type_size
, align
);
4789 /* Reduce rounded_size so it's sharable with the postqueue. */
4790 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4794 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4796 /* Small args are padded downward. */
4797 t
= fold_build2 (GT_EXPR
, sizetype
, rounded_size
, size_int (align
));
4798 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4799 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4800 addr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (addr
), addr
, t
);
4803 /* Compute new value for AP. */
4804 t
= build2 (POINTER_PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, rounded_size
);
4805 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4806 gimplify_and_add (t
, pre_p
);
4808 addr
= fold_convert (build_pointer_type (type
), addr
);
/* For pass-by-reference, dereference once more to reach the value.  */
4811 addr
= build_va_arg_indirect_ref (addr
);
4813 return build_va_arg_indirect_ref (addr
);
/* NOTE(review): the body after the flag_mudflap test (presumably the mudflap
   marking and the final return of ADDR) is missing from this extraction.  */
4816 /* Build an indirect-ref expression over the given TREE, which represents a
4817 piece of a va_arg() expansion. */
4819 build_va_arg_indirect_ref (tree addr
)
4821 addr
= build_fold_indirect_ref (addr
);
4823 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4829 /* Return a dummy expression of type TYPE in order to keep going after an
4833 dummy_object (tree type
)
4835 tree t
= build_int_cst (build_pointer_type (type
), 0);
4836 return build1 (INDIRECT_REF
, type
, t
);
/* NOTE(review): line-mangled; several lines are missing from this extraction
   (e.g. the GS_ERROR returns after the error calls, the `tree t' declaration
   used at upstream 4899, and the promoted-type comparison completing the
   `else if' at upstream 4879).  Lines below preserved verbatim.  */
4839 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4840 builtin function, but a very special sort of operator. */
4842 enum gimplify_status
4843 gimplify_va_arg_expr (tree
*expr_p
, tree
*pre_p
, tree
*post_p
)
4845 tree promoted_type
, want_va_type
, have_va_type
;
4846 tree valist
= TREE_OPERAND (*expr_p
, 0);
4847 tree type
= TREE_TYPE (*expr_p
);
4850 /* Verify that valist is of the proper type. */
4851 want_va_type
= va_list_type_node
;
4852 have_va_type
= TREE_TYPE (valist
);
4854 if (have_va_type
== error_mark_node
)
4857 if (TREE_CODE (want_va_type
) == ARRAY_TYPE
)
4859 /* If va_list is an array type, the argument may have decayed
4860 to a pointer type, e.g. by being passed to another function.
4861 In that case, unwrap both types so that we can compare the
4862 underlying records. */
4863 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
4864 || POINTER_TYPE_P (have_va_type
))
4866 want_va_type
= TREE_TYPE (want_va_type
);
4867 have_va_type
= TREE_TYPE (have_va_type
);
4871 if (TYPE_MAIN_VARIANT (want_va_type
) != TYPE_MAIN_VARIANT (have_va_type
))
4873 error ("first argument to %<va_arg%> not of type %<va_list%>");
4877 /* Generate a diagnostic for requesting data of a type that cannot
4878 be passed through `...' due to type promotion at the call site. */
4879 else if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4882 static bool gave_help
;
4884 /* Unfortunately, this is merely undefined, rather than a constraint
4885 violation, so we cannot make this an error. If this call is never
4886 executed, the program is still strictly conforming. */
4887 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4888 type
, promoted_type
);
4892 inform ("(so you should pass %qT not %qT to %<va_arg%>)",
4893 promoted_type
, type
);
4896 /* We can, however, treat "undefined" any way we please.
4897 Call abort to encourage the user to fix the program. */
4898 inform ("if this code is reached, the program will abort");
4899 t
= build_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4900 append_to_statement_list (t
, pre_p
);
4902 /* This is dead code, but go ahead and finish so that the
4903 mode of the result comes out right. */
4904 *expr_p
= dummy_object (type
);
4909 /* Make it easier for the backends by protecting the valist argument
4910 from multiple evaluations. */
4911 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4913 /* For this case, the backends will be expecting a pointer to
4914 TREE_TYPE (va_list_type_node), but it's possible we've
4915 actually been given an array (an actual va_list_type_node).
4917 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4919 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4920 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4922 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4925 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4927 if (!targetm
.gimplify_va_arg_expr
)
4928 /* FIXME:Once most targets are converted we should merely
4929 assert this is non-null. */
4932 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
/* NOTE(review): the final return (upstream 4949) is missing from this
   extraction; the comment at upstream 4944 is also truncated mid-sentence.  */
4937 /* Expand EXP, a call to __builtin_va_end. */
4940 expand_builtin_va_end (tree exp
)
4942 tree valist
= CALL_EXPR_ARG (exp
, 0);
4944 /* Evaluate for side effects, if needed. I hate macros that don't
4946 if (TREE_SIDE_EFFECTS (valist
))
/* va_end itself does nothing here; only side effects matter.  */
4947 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* NOTE(review): line-mangled; the declarations of `dst'/`src'/`t' and the
   final return are missing from this extraction.  Lines below preserved
   verbatim.  */
4952 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4953 builtin rather than just as an assignment in stdarg.h because of the
4954 nastiness of array-type va_list types. */
4957 expand_builtin_va_copy (tree exp
)
4961 dst
= CALL_EXPR_ARG (exp
, 0);
4962 src
= CALL_EXPR_ARG (exp
, 1);
4964 dst
= stabilize_va_list (dst
, 1);
4965 src
= stabilize_va_list (src
, 0);
/* Scalar va_list: a simple assignment suffices.  */
4967 if (TREE_CODE (va_list_type_node
) != ARRAY_TYPE
)
4969 t
= build2 (MODIFY_EXPR
, va_list_type_node
, dst
, src
);
4970 TREE_SIDE_EFFECTS (t
) = 1;
4971 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* Array-type va_list: copy the whole object with a block move.  */
4975 rtx dstb
, srcb
, size
;
4977 /* Evaluate to pointers. */
4978 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4979 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4980 size
= expand_expr (TYPE_SIZE_UNIT (va_list_type_node
), NULL_RTX
,
4981 VOIDmode
, EXPAND_NORMAL
);
4983 dstb
= convert_memory_address (Pmode
, dstb
);
4984 srcb
= convert_memory_address (Pmode
, srcb
);
4986 /* "Dereference" to BLKmode memories. */
4987 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4988 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4989 set_mem_align (dstb
, TYPE_ALIGN (va_list_type_node
));
4990 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4991 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4992 set_mem_align (srcb
, TYPE_ALIGN (va_list_type_node
));
4995 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
/* NOTE(review): heavily line-mangled; the `rtx tem' declaration at the start
   of the assignment at upstream 5024, the const0_rtx returns on the error
   and unsupported paths, and the final return are missing.  */
5001 /* Expand a call to one of the builtin functions __builtin_frame_address or
5002 __builtin_return_address. */
5005 expand_builtin_frame_address (tree fndecl
, tree exp
)
5007 /* The argument must be a nonnegative integer constant.
5008 It counts the number of frames to scan up the stack.
5009 The value is the return address saved in that frame. */
5010 if (call_expr_nargs (exp
) == 0)
5011 /* Warning about missing arg was already issued. */
5013 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
5015 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5016 error ("invalid argument to %<__builtin_frame_address%>");
5018 error ("invalid argument to %<__builtin_return_address%>");
5024 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
5025 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
5027 /* Some ports cannot access arbitrary stack frames. */
5030 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
5031 warning (0, "unsupported argument to %<__builtin_frame_address%>");
5033 warning (0, "unsupported argument to %<__builtin_return_address%>");
5037 /* For __builtin_frame_address, return what we've got. */
5038 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
/* Force non-constant addresses into a register before returning.  */
5042 && ! CONSTANT_P (tem
))
5043 tem
= copy_to_mode_reg (Pmode
, tem
);
/* NOTE(review): line-mangled; the `rtx op0; rtx result;' declarations, the
   mudflap early-return body, and the final `return result;' are missing from
   this extraction (upstream numbers jump 5059 -> 5064, 5072 -> 5077).  */
5048 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
5049 we failed and the caller should emit a normal call, otherwise try to get
5050 the result in TARGET, if convenient. */
5053 expand_builtin_alloca (tree exp
, rtx target
)
5058 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
5059 should always expand to function calls. These can be intercepted
5064 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5067 /* Compute the argument. */
5068 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5070 /* Allocate the desired space. */
5071 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
5072 result
= convert_memory_address (ptr_mode
, result
);
/* NOTE(review): line-mangled; the `rtx op0; tree arg;' declarations and the
   failure-return after validate_arglist are missing from this extraction.  */
5077 /* Expand a call to a bswap builtin with argument ARG0. MODE
5078 is the mode to expand with. */
5081 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
5083 enum machine_mode mode
;
5087 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5090 arg
= CALL_EXPR_ARG (exp
, 0);
5091 mode
= TYPE_MODE (TREE_TYPE (arg
));
5092 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
/* Byte-swap via the target's bswap optab; must always succeed here.  */
5094 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
5096 gcc_assert (target
);
5098 return convert_to_mode (mode
, target
, 0);
/* NOTE(review): line-mangled; the `rtx op0;' declaration and the
   failure-return after validate_arglist are missing from this extraction.  */
5101 /* Expand a call to a unary builtin in EXP.
5102 Return NULL_RTX if a normal call should be emitted rather than expanding the
5103 function in-line. If convenient, the result should be placed in TARGET.
5104 SUBTARGET may be used as the target for computing one of EXP's operands. */
5107 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
5108 rtx subtarget
, optab op_optab
)
5112 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5115 /* Compute the argument. */
5116 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
5117 VOIDmode
, EXPAND_NORMAL
);
5118 /* Compute op, into TARGET if possible.
5119 Set TARGET to wherever the result comes back. */
5120 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5121 op_optab
, op0
, target
, 1);
5122 gcc_assert (target
);
5124 return convert_to_mode (target_mode
, target
, 0);
/* NOTE(review): line-mangled; the fall-through `return NULL_RTX;' paths are
   missing from this extraction.  */
5127 /* If the string passed to fputs is a constant and is one character
5128 long, we attempt to transform this call into __builtin_fputc(). */
5131 expand_builtin_fputs (tree exp
, rtx target
, bool unlocked
)
5133 /* Verify the arguments in the original call. */
5134 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
/* target == const0_rtx means the return value is ignored, which
   enables more aggressive folding.  */
5136 tree result
= fold_builtin_fputs (CALL_EXPR_ARG (exp
, 0),
5137 CALL_EXPR_ARG (exp
, 1),
5138 (target
== const0_rtx
),
5139 unlocked
, NULL_TREE
);
5141 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
/* NOTE(review): line-mangled; the declarations of `arg'/`c', the
   too-few-args return, and the final `return target;' are missing from this
   extraction (upstream numbers jump 5163 -> 5168).  */
5146 /* Expand a call to __builtin_expect. We just return our argument
5147 as the builtin_expect semantic should've been already executed by
5148 tree branch prediction pass. */
5151 expand_builtin_expect (tree exp
, rtx target
)
5155 if (call_expr_nargs (exp
) < 2)
5157 arg
= CALL_EXPR_ARG (exp
, 0);
5158 c
= CALL_EXPR_ARG (exp
, 1);
/* The expected-value hint is discarded; only ARG's value matters now.  */
5160 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5161 /* When guessing was done, the hints should be already stripped away. */
5162 gcc_assert (!flag_guess_branch_prob
5163 || optimize
== 0 || errorcount
|| sorrycount
)
/* Emit code that stops execution: the target's trap insn when available,
   otherwise a call to abort.  NOTE(review): the `#ifdef HAVE_trap' /
   `#else' / `#endif' scaffolding implied by the two alternatives below is
   missing from this extraction (upstream numbers jump 5168 -> 5172 -> 5175).  */
5168 expand_builtin_trap (void)
5172 emit_insn (gen_trap ());
5175 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
/* NOTE(review): line-mangled; the `rtx op0; tree arg;' declarations and the
   failure-return after validate_arglist are missing from this extraction.  */
5179 /* Expand EXP, a call to fabs, fabsf or fabsl.
5180 Return NULL_RTX if a normal call should be emitted rather than expanding
5181 the function inline. If convenient, the result should be placed
5182 in TARGET. SUBTARGET may be used as the target for computing
5186 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5188 enum machine_mode mode
;
5192 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5195 arg
= CALL_EXPR_ARG (exp
, 0);
5196 mode
= TYPE_MODE (TREE_TYPE (arg
));
5197 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5198 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
/* NOTE(review): line-mangled; the `rtx op0, op1; tree arg;' declarations and
   the failure-return after validate_arglist are missing from this
   extraction.  (The "Return NULL is" typo below is upstream's.)  */
5201 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5202 Return NULL is a normal call should be emitted rather than expanding the
5203 function inline. If convenient, the result should be placed in TARGET.
5204 SUBTARGET may be used as the target for computing the operand. */
5207 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5212 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5215 arg
= CALL_EXPR_ARG (exp
, 0);
5216 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5218 arg
= CALL_EXPR_ARG (exp
, 1);
5219 op1
= expand_normal (arg
);
5221 return expand_copysign (op0
, op1
, target
);
/* NOTE(review): line-mangled; the final `return t;' (upstream 5246) is
   missing from this extraction.  */
5224 /* Create a new constant string literal and return a char* pointer to it.
5225 The STRING_CST value is the LEN characters at STR. */
5227 build_string_literal (int len
, const char *str
)
5229 tree t
, elem
, index
, type
;
5231 t
= build_string (len
, str
);
/* Element type is `const char' (readonly variant of char).  */
5232 elem
= build_type_variant (char_type_node
, 1, 0);
5233 index
= build_index_type (build_int_cst (NULL_TREE
, len
- 1));
5234 type
= build_array_type (elem
, index
);
5235 TREE_TYPE (t
) = type
;
5236 TREE_CONSTANT (t
) = 1;
5237 TREE_INVARIANT (t
) = 1;
5238 TREE_READONLY (t
) = 1;
5239 TREE_STATIC (t
) = 1;
/* Take the address of the array, then convert to `const char *'.  */
5241 type
= build_pointer_type (type
);
5242 t
= build1 (ADDR_EXPR
, type
, t
);
5244 type
= build_pointer_type (elem
);
5245 t
= build1 (NOP_EXPR
, type
, t
);
/* NOTE(review): line-mangled; the `bool unlocked' parameter line, the
   declarations of `fmt'/`arg'/`fn', the nargs checks, and several
   early-return lines are missing from this extraction (upstream numbers
   jump 5252 -> 5255, 5273 -> 5276, 5289 -> 5292).  */
5249 /* Expand EXP, a call to printf or printf_unlocked.
5250 Return NULL_RTX if a normal call should be emitted rather than transforming
5251 the function inline. If convenient, the result should be placed in
5252 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5255 expand_builtin_printf (tree exp
, rtx target
, enum machine_mode mode
,
5258 /* If we're using an unlocked function, assume the other unlocked
5259 functions exist explicitly. */
5260 tree
const fn_putchar
= unlocked
? built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
]
5261 : implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
5262 tree
const fn_puts
= unlocked
? built_in_decls
[BUILT_IN_PUTS_UNLOCKED
]
5263 : implicit_built_in_decls
[BUILT_IN_PUTS
];
5264 const char *fmt_str
;
5267 int nargs
= call_expr_nargs (exp
);
5269 /* If the return value is used, don't do the transformation. */
5270 if (target
!= const0_rtx
)
5273 /* Verify the required arguments in the original call. */
5276 fmt
= CALL_EXPR_ARG (exp
, 0);
5277 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5280 /* Check whether the format is a literal string constant. */
5281 fmt_str
= c_getstr (fmt
);
5282 if (fmt_str
== NULL
)
5285 if (!init_target_chars ())
5288 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5289 if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
5292 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))))
5295 fn
= build_call_expr (fn_puts
, 1, CALL_EXPR_ARG (exp
, 1));
5297 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5298 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5301 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))) != INTEGER_TYPE
)
5304 fn
= build_call_expr (fn_putchar
, 1, CALL_EXPR_ARG (exp
, 1));
5308 /* We can't handle anything else with % args or %% ... yet. */
5309 if (strchr (fmt_str
, target_percent
))
5315 /* If the format specifier was "", printf does nothing. */
5316 if (fmt_str
[0] == '\0')
5318 /* If the format specifier has length of 1, call putchar. */
5319 if (fmt_str
[1] == '\0')
5321 /* Given printf("c"), (where c is any one character,)
5322 convert "c"[0] to an int and pass that to the replacement
5324 arg
= build_int_cst (NULL_TREE
, fmt_str
[0]);
5326 fn
= build_call_expr (fn_putchar
, 1, arg
);
5330 /* If the format specifier was "string\n", call puts("string"). */
5331 size_t len
= strlen (fmt_str
);
5332 if ((unsigned char)fmt_str
[len
- 1] == target_newline
)
5334 /* Create a NUL-terminated string that's one char shorter
5335 than the original, stripping off the trailing '\n'. */
5336 char *newstr
= alloca (len
);
5337 memcpy (newstr
, fmt_str
, len
- 1);
5338 newstr
[len
- 1] = 0;
5339 arg
= build_string_literal (len
, newstr
);
5341 fn
= build_call_expr (fn_puts
, 1, arg
);
5344 /* We'd like to arrange to call fputs(string,stdout) here,
5345 but we need stdout and don't have a way to get it yet. */
/* Preserve the tail-call flag on the replacement call.  */
5352 if (TREE_CODE (fn
) == CALL_EXPR
)
5353 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5354 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5357 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5358 Return NULL_RTX if a normal call should be emitted rather than transforming
5359 the function inline. If convenient, the result should be placed in
5360 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
/* NOTE(review): this chunk has lines elided by extraction (e.g. the
   early-return bodies and several brace lines are missing); the code
   below is kept byte-identical rather than reconstructed. */
5363 expand_builtin_fprintf (tree exp
, rtx target
, enum machine_mode mode
,
5366 /* If we're using an unlocked function, assume the other unlocked
5367 functions exist explicitly. */
5368 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
5369 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
5370 tree
const fn_fputs
= unlocked
? built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
]
5371 : implicit_built_in_decls
[BUILT_IN_FPUTS
];
5372 const char *fmt_str
;
5375 int nargs
= call_expr_nargs (exp
);
5377 /* If the return value is used, don't do the transformation. */
5378 if (target
!= const0_rtx
)
5381 /* Verify the required arguments in the original call. */
/* First argument is the FILE*, second the format string. */
5384 fp
= CALL_EXPR_ARG (exp
, 0);
5385 if (! POINTER_TYPE_P (TREE_TYPE (fp
)))
5387 fmt
= CALL_EXPR_ARG (exp
, 1);
5388 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5391 /* Check whether the format is a literal string constant. */
5392 fmt_str
= c_getstr (fmt
);
5393 if (fmt_str
== NULL
)
5396 if (!init_target_chars ())
5399 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5400 if (strcmp (fmt_str
, target_percent_s
) == 0)
5403 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))))
5405 arg
= CALL_EXPR_ARG (exp
, 2);
5407 fn
= build_call_expr (fn_fputs
, 2, arg
, fp
);
5409 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5410 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5413 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))) != INTEGER_TYPE
)
5415 arg
= CALL_EXPR_ARG (exp
, 2);
5417 fn
= build_call_expr (fn_fputc
, 2, arg
, fp
);
5421 /* We can't handle anything else with % args or %% ... yet. */
5422 if (strchr (fmt_str
, target_percent
))
5428 /* If the format specifier was "", fprintf does nothing. */
5429 if (fmt_str
[0] == '\0')
5431 /* Evaluate and ignore FILE* argument for side-effects. */
5432 expand_expr (fp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5436 /* When "string" doesn't contain %, replace all cases of
5437 fprintf(stream,string) with fputs(string,stream). The fputs
5438 builtin will take care of special cases like length == 1. */
5440 fn
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
/* Propagate the tail-call flag so the replacement can still be a tail call. */
5445 if (TREE_CODE (fn
) == CALL_EXPR
)
5446 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5447 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5450 /* Expand a call EXP to sprintf. Return NULL_RTX if
5451 a normal call should be emitted rather than expanding the function
5452 inline. If convenient, the result should be placed in TARGET with
5456 expand_builtin_sprintf (tree exp
, rtx target
, enum machine_mode mode
)
5459 const char *fmt_str
;
5460 int nargs
= call_expr_nargs (exp
);
5462 /* Verify the required arguments in the original call. */
/* Argument 0 is the destination buffer, argument 1 the format string. */
5465 dest
= CALL_EXPR_ARG (exp
, 0);
5466 if (! POINTER_TYPE_P (TREE_TYPE (dest
)))
/* FIX(review): the format string is argument 1 of sprintf(dest, fmt, ...);
   the previous code re-fetched argument 0 (the destination) here, so every
   following check and transformation operated on the wrong operand. */
5468 fmt
= CALL_EXPR_ARG (exp
, 1);
5469 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5472 /* Check whether the format is a literal string constant. */
5473 fmt_str
= c_getstr (fmt
);
5474 if (fmt_str
== NULL
)
5477 if (!init_target_chars ())
5480 /* If the format doesn't contain % args or %%, use strcpy. */
5481 if (strchr (fmt_str
, target_percent
) == 0)
5483 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
/* Don't transform calls with extra arguments or when strcpy is unavailable. */
5486 if ((nargs
> 2) || ! fn
)
5488 expand_expr (build_call_expr (fn
, 2, dest
, fmt
),
5489 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5490 if (target
== const0_rtx
)
/* sprintf returns the number of characters written, i.e. strlen(fmt). */
5492 exp
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
5493 return expand_expr (exp
, target
, mode
, EXPAND_NORMAL
);
5495 /* If the format is "%s", use strcpy if the result isn't used. */
5496 else if (strcmp (fmt_str
, target_percent_s
) == 0)
5499 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5505 arg
= CALL_EXPR_ARG (exp
, 2);
5506 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
/* If the result is used we must know the copied length at compile time. */
5509 if (target
!= const0_rtx
)
5511 len
= c_strlen (arg
, 1);
5512 if (! len
|| TREE_CODE (len
) != INTEGER_CST
)
5518 expand_expr (build_call_expr (fn
, 2, dest
, arg
),
5519 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5521 if (target
== const0_rtx
)
5523 return expand_expr (len
, target
, mode
, EXPAND_NORMAL
);
5529 /* Expand a call to either the entry or exit function profiler. */
/* NOTE(review): the tail of this call (remaining emit_library_call
   arguments and the closing brace) was elided by extraction. */
5532 expand_builtin_profile_func (bool exitp
)
/* Address of the current function, taken from its DECL_RTL MEM. */
5536 this = DECL_RTL (current_function_decl
);
5537 gcc_assert (MEM_P (this));
5538 this = XEXP (this, 0);
5541 which
= profile_function_exit_libfunc
;
5543 which
= profile_function_entry_libfunc
;
5545 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this, Pmode
,
5546 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
5553 /* Expand a call to __builtin___clear_cache. */
5556 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED
)
5558 #ifndef HAVE_clear_cache
5559 #ifdef CLEAR_INSN_CACHE
5560 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5561 does something. Just do the default expansion to a call to
5565 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5566 does nothing. There is no need to call it. Do nothing. */
5568 #endif /* CLEAR_INSN_CACHE */
5570 /* We have a "clear_cache" insn, and it will handle everything. */
5572 rtx begin_rtx
, end_rtx
;
5573 enum insn_code icode
;
5575 /* We must not expand to a library call. If we did, any
5576 fallback library function in libgcc that might contain a call to
5577 __builtin___clear_cache() would recurse infinitely. */
5578 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5580 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5584 if (HAVE_clear_cache
)
5586 icode
= CODE_FOR_clear_cache
;
/* Expand both pointer arguments and coerce them to operands the
   clear_cache insn pattern accepts. */
5588 begin
= CALL_EXPR_ARG (exp
, 0);
5589 begin_rtx
= expand_expr (begin
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5590 begin_rtx
= convert_memory_address (Pmode
, begin_rtx
);
5591 if (!insn_data
[icode
].operand
[0].predicate (begin_rtx
, Pmode
))
5592 begin_rtx
= copy_to_mode_reg (Pmode
, begin_rtx
);
5594 end
= CALL_EXPR_ARG (exp
, 1);
5595 end_rtx
= expand_expr (end
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
5596 end_rtx
= convert_memory_address (Pmode
, end_rtx
);
5597 if (!insn_data
[icode
].operand
[1].predicate (end_rtx
, Pmode
))
5598 end_rtx
= copy_to_mode_reg (Pmode
, end_rtx
);
5600 emit_insn (gen_clear_cache (begin_rtx
, end_rtx
));
5603 #endif /* HAVE_clear_cache */
5606 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5609 round_trampoline_addr (rtx tramp
)
5611 rtx temp
, addend
, mask
;
5613 /* If we don't need too much alignment, we'll have been guaranteed
5614 proper alignment by get_trampoline_type. */
5615 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5618 /* Round address up to desired boundary. */
/* tramp = (tramp + align - 1) & -align, computed with two binops. */
5619 temp
= gen_reg_rtx (Pmode
);
5620 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
5621 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5623 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5624 temp
, 0, OPTAB_LIB_WIDEN
);
5625 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5626 temp
, 0, OPTAB_LIB_WIDEN
);
/* Expand a call to __builtin_init_trampoline: copy the trampoline
   template (if any) into place and fill in the target function and
   static chain via the INITIALIZE_TRAMPOLINE target macro. */
5632 expand_builtin_init_trampoline (tree exp
)
5634 tree t_tramp
, t_func
, t_chain
;
5635 rtx r_tramp
, r_func
, r_chain
;
5636 #ifdef TRAMPOLINE_TEMPLATE
5640 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5641 POINTER_TYPE
, VOID_TYPE
))
5644 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5645 t_func
= CALL_EXPR_ARG (exp
, 1);
5646 t_chain
= CALL_EXPR_ARG (exp
, 2);
5648 r_tramp
= expand_normal (t_tramp
);
5649 r_func
= expand_normal (t_func
);
5650 r_chain
= expand_normal (t_chain
);
5652 /* Generate insns to initialize the trampoline. */
5653 r_tramp
= round_trampoline_addr (r_tramp
);
5654 #ifdef TRAMPOLINE_TEMPLATE
/* Block-copy the target's trampoline template into the buffer. */
5655 blktramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5656 set_mem_align (blktramp
, TRAMPOLINE_ALIGNMENT
);
5657 emit_block_move (blktramp
, assemble_trampoline_template (),
5658 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
5660 trampolines_created
= 1;
5661 INITIALIZE_TRAMPOLINE (r_tramp
, r_func
, r_chain
);
/* Expand a call to __builtin_adjust_trampoline: round the trampoline
   address to TRAMPOLINE_ALIGNMENT and apply any target-specific
   address adjustment. */
5667 expand_builtin_adjust_trampoline (tree exp
)
5671 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5674 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5675 tramp
= round_trampoline_addr (tramp
);
5676 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5677 TRAMPOLINE_ADJUST_ADDRESS (tramp
);
5683 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5684 function. The function first checks whether the back end provides
5685 an insn to implement signbit for the respective mode. If not, it
5686 checks whether the floating point format of the value is such that
5687 the sign bit can be extracted. If that is not the case, the
5688 function returns NULL_RTX to indicate that a normal call should be
5689 emitted rather than expanding the function in-line. EXP is the
5690 expression that is a call to the builtin function; if convenient,
5691 the result should be placed in TARGET. */
5693 expand_builtin_signbit (tree exp
, rtx target
)
5695 const struct real_format
*fmt
;
5696 enum machine_mode fmode
, imode
, rmode
;
5697 HOST_WIDE_INT hi
, lo
;
5700 enum insn_code icode
;
5703 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5706 arg
= CALL_EXPR_ARG (exp
, 0);
5707 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5708 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5709 fmt
= REAL_MODE_FORMAT (fmode
);
5711 arg
= builtin_save_expr (arg
);
5713 /* Expand the argument yielding a RTX expression. */
5714 temp
= expand_normal (arg
);
5716 /* Check if the back end provides an insn that handles signbit for the
5718 icode
= signbit_optab
->handlers
[(int) fmode
].insn_code
;
5719 if (icode
!= CODE_FOR_nothing
)
5721 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
5722 emit_unop_insn (icode
, target
, temp
, UNKNOWN
);
5726 /* For floating point formats without a sign bit, implement signbit
5728 bitpos
= fmt
->signbit_ro
;
5731 /* But we can't do this if the format supports signed zero. */
5732 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
/* Fall back to arg < 0.0 when there is no sign bit to extract. */
5735 arg
= fold_build2 (LT_EXPR
, TREE_TYPE (exp
), arg
,
5736 build_real (TREE_TYPE (arg
), dconst0
));
5737 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5740 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5742 imode
= int_mode_for_mode (fmode
);
5743 if (imode
== BLKmode
)
5745 temp
= gen_lowpart (imode
, temp
);
5750 /* Handle targets with different FP word orders. */
5751 if (FLOAT_WORDS_BIG_ENDIAN
)
5752 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5754 word
= bitpos
/ BITS_PER_WORD
;
5755 temp
= operand_subword_force (temp
, word
, fmode
);
5756 bitpos
= bitpos
% BITS_PER_WORD
;
5759 /* Force the intermediate word_mode (or narrower) result into a
5760 register. This avoids attempting to create paradoxical SUBREGs
5761 of floating point modes below. */
5762 temp
= force_reg (imode
, temp
);
5764 /* If the bitpos is within the "result mode" lowpart, the operation
5765 can be implemented with a single bitwise AND. Otherwise, we need
5766 a right shift and an AND. */
5768 if (bitpos
< GET_MODE_BITSIZE (rmode
))
/* Build the single-bit mask as a (lo, hi) HOST_WIDE_INT pair. */
5770 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
5773 lo
= (HOST_WIDE_INT
) 1 << bitpos
;
5777 hi
= (HOST_WIDE_INT
) 1 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
5782 temp
= gen_lowpart (rmode
, temp
);
5783 temp
= expand_binop (rmode
, and_optab
, temp
,
5784 immed_double_const (lo
, hi
, rmode
),
5785 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5789 /* Perform a logical right shift to place the signbit in the least
5790 significant bit, then truncate the result to the desired mode
5791 and mask just this bit. */
5792 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5793 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5794 temp
= gen_lowpart (rmode
, temp
);
5795 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5796 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5802 /* Expand fork or exec calls. TARGET is the desired target of the
5803 call. EXP is the call. FN is the
5804 identifier of the actual function. IGNORE is nonzero if the
5805 value is to be ignored. */
5808 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5813 /* If we are not profiling, just call the function. */
5814 if (!profile_arc_flag
)
5817 /* Otherwise call the wrapper. This should be equivalent for the rest of
5818 compiler, so the code does not diverge, and the wrapper may run the
5819 code necessary for keeping the profiling sane. */
/* Map each fork/exec builtin to its libgcov wrapper of the same shape. */
5821 switch (DECL_FUNCTION_CODE (fn
))
5824 id
= get_identifier ("__gcov_fork");
5827 case BUILT_IN_EXECL
:
5828 id
= get_identifier ("__gcov_execl");
5831 case BUILT_IN_EXECV
:
5832 id
= get_identifier ("__gcov_execv");
5835 case BUILT_IN_EXECLP
:
5836 id
= get_identifier ("__gcov_execlp");
5839 case BUILT_IN_EXECLE
:
5840 id
= get_identifier ("__gcov_execle");
5843 case BUILT_IN_EXECVP
:
5844 id
= get_identifier ("__gcov_execvp");
5847 case BUILT_IN_EXECVE
:
5848 id
= get_identifier ("__gcov_execve");
/* Build an external declaration for the wrapper with the same type as FN,
   then rewrite the call to target it. */
5855 decl
= build_decl (FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5856 DECL_EXTERNAL (decl
) = 1;
5857 TREE_PUBLIC (decl
) = 1;
5858 DECL_ARTIFICIAL (decl
) = 1;
5859 TREE_NOTHROW (decl
) = 1;
5860 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5861 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5862 call
= rewrite_call_expr (exp
, 0, decl
, 0);
5863 return expand_call (call
, target
, ignore
);
5868 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5869 the pointer in these functions is void*, the tree optimizers may remove
5870 casts. The mode computed in expand_builtin isn't reliable either, due
5871 to __sync_bool_compare_and_swap.
5873 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5874 group of builtins. This gives us log2 of the mode size. */
5876 static inline enum machine_mode
5877 get_builtin_sync_mode (int fcode_diff
)
5879 /* The size is not negotiable, so ask not to get BLKmode in return
5880 if the target indicates that a smaller size would be better. */
/* Size in bits is BITS_PER_UNIT << fcode_diff, i.e. 1/2/4/8/16 bytes. */
5881 return mode_for_size (BITS_PER_UNIT
<< fcode_diff
, MODE_INT
, 0);
5884 /* Expand the memory expression LOC and return the appropriate memory operand
5885 for the builtin_sync operations. */
5888 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5892 addr
= expand_expr (loc
, NULL_RTX
, Pmode
, EXPAND_SUM
);
5894 /* Note that we explicitly do not want any alias information for this
5895 memory, so that we kill all other live memories. Otherwise we don't
5896 satisfy the full barrier semantics of the intrinsic. */
5897 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
/* Mark the MEM as a memory barrier and volatile so nothing is cached
   or reordered across the intrinsic. */
5899 set_mem_align (mem
, get_pointer_alignment (loc
, BIGGEST_ALIGNMENT
));
5900 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5901 MEM_VOLATILE_P (mem
) = 1;
5906 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5907 EXP is the CALL_EXPR. CODE is the rtx code
5908 that corresponds to the arithmetic or logical operation from the name;
5909 an exception here is that NOT actually means NAND. TARGET is an optional
5910 place for us to store the results; AFTER is true if this is the
5911 fetch_and_xxx form. IGNORE is true if we don't actually care about
5912 the result of the operation at all. */
5915 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5916 enum rtx_code code
, bool after
,
5917 rtx target
, bool ignore
)
5920 enum machine_mode old_mode
;
5922 /* Expand the operands. */
5923 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5925 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5926 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5927 of CONST_INTs, where we know the old_mode only from the call argument. */
5928 old_mode
= GET_MODE (val
);
5929 if (old_mode
== VOIDmode
)
5930 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5931 val
= convert_modes (mode
, old_mode
, val
, 1);
/* Result ignored: use the cheaper operation-only expansion. */
5934 return expand_sync_operation (mem
, val
, code
);
5936 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
5939 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5940 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5941 true if this is the boolean form. TARGET is a place for us to store the
5942 results; this is NOT optional if IS_BOOL is true. */
5945 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5946 bool is_bool
, rtx target
)
5948 rtx old_val
, new_val
, mem
;
5949 enum machine_mode old_mode
;
5951 /* Expand the operands. */
5952 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5955 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
,
5956 mode
, EXPAND_NORMAL
);
5957 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5958 of CONST_INTs, where we know the old_mode only from the call argument. */
5959 old_mode
= GET_MODE (old_val
);
5960 if (old_mode
== VOIDmode
)
5961 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5962 old_val
= convert_modes (mode
, old_mode
, old_val
, 1);
5964 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL_RTX
,
5965 mode
, EXPAND_NORMAL
);
5966 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5967 of CONST_INTs, where we know the old_mode only from the call argument. */
/* old_mode is reused here as scratch for NEW_VAL's original mode. */
5968 old_mode
= GET_MODE (new_val
);
5969 if (old_mode
== VOIDmode
)
5970 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2)));
5971 new_val
= convert_modes (mode
, old_mode
, new_val
, 1);
5974 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
5976 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
5979 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5980 general form is actually an atomic exchange, and some targets only
5981 support a reduced form with the second argument being a constant 1.
5982 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5986 expand_builtin_lock_test_and_set (enum machine_mode mode
, tree exp
,
5990 enum machine_mode old_mode
;
5992 /* Expand the operands. */
5993 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5994 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL_RTX
, mode
, EXPAND_NORMAL
);
5995 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5996 of CONST_INTs, where we know the old_mode only from the call argument. */
5997 old_mode
= GET_MODE (val
);
5998 if (old_mode
== VOIDmode
)
5999 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
6000 val
= convert_modes (mode
, old_mode
, val
, 1);
6002 return expand_sync_lock_test_and_set (mem
, val
, target
);
6005 /* Expand the __sync_synchronize intrinsic. */
6008 expand_builtin_synchronize (void)
6012 #ifdef HAVE_memory_barrier
6013 if (HAVE_memory_barrier
)
6015 emit_insn (gen_memory_barrier ());
6020 /* If no explicit memory barrier instruction is available, create an
6021 empty asm stmt with a memory clobber. */
/* asm volatile ("" ::: "memory") — a compiler-level barrier only. */
6022 x
= build4 (ASM_EXPR
, void_type_node
, build_string (0, ""), NULL
, NULL
,
6023 tree_cons (NULL
, build_string (6, "memory"), NULL
));
6024 ASM_VOLATILE_P (x
) = 1;
6025 expand_asm_expr (x
);
6028 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6031 expand_builtin_lock_release (enum machine_mode mode
, tree exp
)
6033 enum insn_code icode
;
/* Releasing a lock means storing zero into it. */
6035 rtx val
= const0_rtx
;
6037 /* Expand the operands. */
6038 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
6040 /* If there is an explicit operation in the md file, use it. */
6041 icode
= sync_lock_release
[mode
];
6042 if (icode
!= CODE_FOR_nothing
)
6044 if (!insn_data
[icode
].operand
[1].predicate (val
, mode
))
6045 val
= force_reg (mode
, val
);
6047 insn
= GEN_FCN (icode
) (mem
, val
);
6055 /* Otherwise we can implement this operation by emitting a barrier
6056 followed by a store of zero. */
6057 expand_builtin_synchronize ();
6058 emit_move_insn (mem
, val
);
6061 /* Expand an expression EXP that calls a built-in function,
6062 with result going to TARGET if that's convenient
6063 (and in mode MODE if that's convenient).
6064 SUBTARGET may be used as the target for computing one of EXP's operands.
6065 IGNORE is nonzero if the value is to be ignored. */
6068 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
6071 tree fndecl
= get_callee_fndecl (exp
);
6072 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
6073 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
6075 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6076 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
6078 /* When not optimizing, generate calls to library functions for a certain
6081 && !called_as_built_in (fndecl
)
6082 && DECL_ASSEMBLER_NAME_SET_P (fndecl
)
6083 && fcode
!= BUILT_IN_ALLOCA
)
6084 return expand_call (exp
, target
, ignore
);
6086 /* The built-in function expanders test for target == const0_rtx
6087 to determine whether the function's result will be ignored. */
6089 target
= const0_rtx
;
6091 /* If the result of a pure or const built-in function is ignored, and
6092 none of its arguments are volatile, we can avoid expanding the
6093 built-in call and just evaluate the arguments for side-effects. */
6094 if (target
== const0_rtx
6095 && (DECL_IS_PURE (fndecl
) || TREE_READONLY (fndecl
)))
6097 bool volatilep
= false;
6099 call_expr_arg_iterator iter
;
6101 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6102 if (TREE_THIS_VOLATILE (arg
))
6110 FOR_EACH_CALL_EXPR_ARG (arg
, iter
, exp
)
6111 expand_expr (arg
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
6118 CASE_FLT_FN (BUILT_IN_FABS
):
6119 target
= expand_builtin_fabs (exp
, target
, subtarget
);
6124 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
6125 target
= expand_builtin_copysign (exp
, target
, subtarget
);
6130 /* Just do a normal library call if we were unable to fold
6132 CASE_FLT_FN (BUILT_IN_CABS
):
6135 CASE_FLT_FN (BUILT_IN_EXP
):
6136 CASE_FLT_FN (BUILT_IN_EXP10
):
6137 CASE_FLT_FN (BUILT_IN_POW10
):
6138 CASE_FLT_FN (BUILT_IN_EXP2
):
6139 CASE_FLT_FN (BUILT_IN_EXPM1
):
6140 CASE_FLT_FN (BUILT_IN_LOGB
):
6141 CASE_FLT_FN (BUILT_IN_LOG
):
6142 CASE_FLT_FN (BUILT_IN_LOG10
):
6143 CASE_FLT_FN (BUILT_IN_LOG2
):
6144 CASE_FLT_FN (BUILT_IN_LOG1P
):
6145 CASE_FLT_FN (BUILT_IN_TAN
):
6146 CASE_FLT_FN (BUILT_IN_ASIN
):
6147 CASE_FLT_FN (BUILT_IN_ACOS
):
6148 CASE_FLT_FN (BUILT_IN_ATAN
):
6149 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6150 because of possible accuracy problems. */
6151 if (! flag_unsafe_math_optimizations
)
6153 CASE_FLT_FN (BUILT_IN_SQRT
):
6154 CASE_FLT_FN (BUILT_IN_FLOOR
):
6155 CASE_FLT_FN (BUILT_IN_CEIL
):
6156 CASE_FLT_FN (BUILT_IN_TRUNC
):
6157 CASE_FLT_FN (BUILT_IN_ROUND
):
6158 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6159 CASE_FLT_FN (BUILT_IN_RINT
):
6160 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6165 CASE_FLT_FN (BUILT_IN_ILOGB
):
6166 if (! flag_unsafe_math_optimizations
)
6168 CASE_FLT_FN (BUILT_IN_ISINF
):
6169 CASE_FLT_FN (BUILT_IN_FINITE
):
6170 case BUILT_IN_ISFINITE
:
6171 case BUILT_IN_ISNORMAL
:
6172 target
= expand_builtin_interclass_mathfn (exp
, target
, subtarget
);
6177 CASE_FLT_FN (BUILT_IN_LCEIL
):
6178 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6179 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6180 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6181 target
= expand_builtin_int_roundingfn (exp
, target
, subtarget
);
6186 CASE_FLT_FN (BUILT_IN_LRINT
):
6187 CASE_FLT_FN (BUILT_IN_LLRINT
):
6188 CASE_FLT_FN (BUILT_IN_LROUND
):
6189 CASE_FLT_FN (BUILT_IN_LLROUND
):
6190 target
= expand_builtin_int_roundingfn_2 (exp
, target
, subtarget
);
6195 CASE_FLT_FN (BUILT_IN_POW
):
6196 target
= expand_builtin_pow (exp
, target
, subtarget
);
6201 CASE_FLT_FN (BUILT_IN_POWI
):
6202 target
= expand_builtin_powi (exp
, target
, subtarget
);
6207 CASE_FLT_FN (BUILT_IN_ATAN2
):
6208 CASE_FLT_FN (BUILT_IN_LDEXP
):
6209 CASE_FLT_FN (BUILT_IN_SCALB
):
6210 CASE_FLT_FN (BUILT_IN_SCALBN
):
6211 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6212 if (! flag_unsafe_math_optimizations
)
6215 CASE_FLT_FN (BUILT_IN_FMOD
):
6216 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6217 CASE_FLT_FN (BUILT_IN_DREM
):
6218 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6223 CASE_FLT_FN (BUILT_IN_CEXPI
):
6224 target
= expand_builtin_cexpi (exp
, target
, subtarget
);
6225 gcc_assert (target
);
6228 CASE_FLT_FN (BUILT_IN_SIN
):
6229 CASE_FLT_FN (BUILT_IN_COS
):
6230 if (! flag_unsafe_math_optimizations
)
6232 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6237 CASE_FLT_FN (BUILT_IN_SINCOS
):
6238 if (! flag_unsafe_math_optimizations
)
6240 target
= expand_builtin_sincos (exp
);
6245 case BUILT_IN_APPLY_ARGS
:
6246 return expand_builtin_apply_args ();
6248 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6249 FUNCTION with a copy of the parameters described by
6250 ARGUMENTS, and ARGSIZE. It returns a block of memory
6251 allocated on the stack into which is stored all the registers
6252 that might possibly be used for returning the result of a
6253 function. ARGUMENTS is the value returned by
6254 __builtin_apply_args. ARGSIZE is the number of bytes of
6255 arguments that must be copied. ??? How should this value be
6256 computed? We'll also need a safe worst case value for varargs
6258 case BUILT_IN_APPLY
:
6259 if (!validate_arglist (exp
, POINTER_TYPE
,
6260 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6261 && !validate_arglist (exp
, REFERENCE_TYPE
,
6262 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6268 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6269 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6270 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6272 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6275 /* __builtin_return (RESULT) causes the function to return the
6276 value described by RESULT. RESULT is address of the block of
6277 memory returned by __builtin_apply. */
6278 case BUILT_IN_RETURN
:
6279 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6280 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6283 case BUILT_IN_SAVEREGS
:
6284 return expand_builtin_saveregs ();
6286 case BUILT_IN_ARGS_INFO
:
6287 return expand_builtin_args_info (exp
);
6289 case BUILT_IN_VA_ARG_PACK
:
6290 /* All valid uses of __builtin_va_arg_pack () are removed during
6292 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp
);
6295 case BUILT_IN_VA_ARG_PACK_LEN
:
6296 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6298 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp
);
6301 /* Return the address of the first anonymous stack arg. */
6302 case BUILT_IN_NEXT_ARG
:
6303 if (fold_builtin_next_arg (exp
, false))
6305 return expand_builtin_next_arg ();
6307 case BUILT_IN_CLEAR_CACHE
:
6308 target
= expand_builtin___clear_cache (exp
);
6313 case BUILT_IN_CLASSIFY_TYPE
:
6314 return expand_builtin_classify_type (exp
);
6316 case BUILT_IN_CONSTANT_P
:
6319 case BUILT_IN_FRAME_ADDRESS
:
6320 case BUILT_IN_RETURN_ADDRESS
:
6321 return expand_builtin_frame_address (fndecl
, exp
);
6323 /* Returns the address of the area where the structure is returned.
6325 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6326 if (call_expr_nargs (exp
) != 0
6327 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6328 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6331 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6333 case BUILT_IN_ALLOCA
:
6334 target
= expand_builtin_alloca (exp
, target
);
6339 case BUILT_IN_STACK_SAVE
:
6340 return expand_stack_save ();
6342 case BUILT_IN_STACK_RESTORE
:
6343 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6346 case BUILT_IN_BSWAP32
:
6347 case BUILT_IN_BSWAP64
:
6348 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6354 CASE_INT_FN (BUILT_IN_FFS
):
6355 case BUILT_IN_FFSIMAX
:
6356 target
= expand_builtin_unop (target_mode
, exp
, target
,
6357 subtarget
, ffs_optab
);
6362 CASE_INT_FN (BUILT_IN_CLZ
):
6363 case BUILT_IN_CLZIMAX
:
6364 target
= expand_builtin_unop (target_mode
, exp
, target
,
6365 subtarget
, clz_optab
);
6370 CASE_INT_FN (BUILT_IN_CTZ
):
6371 case BUILT_IN_CTZIMAX
:
6372 target
= expand_builtin_unop (target_mode
, exp
, target
,
6373 subtarget
, ctz_optab
);
6378 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6379 case BUILT_IN_POPCOUNTIMAX
:
6380 target
= expand_builtin_unop (target_mode
, exp
, target
,
6381 subtarget
, popcount_optab
);
6386 CASE_INT_FN (BUILT_IN_PARITY
):
6387 case BUILT_IN_PARITYIMAX
:
6388 target
= expand_builtin_unop (target_mode
, exp
, target
,
6389 subtarget
, parity_optab
);
6394 case BUILT_IN_STRLEN
:
6395 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6400 case BUILT_IN_STRCPY
:
6401 target
= expand_builtin_strcpy (fndecl
, exp
, target
, mode
);
6406 case BUILT_IN_STRNCPY
:
6407 target
= expand_builtin_strncpy (exp
, target
, mode
);
6412 case BUILT_IN_STPCPY
:
6413 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6418 case BUILT_IN_STRCAT
:
6419 target
= expand_builtin_strcat (fndecl
, exp
, target
, mode
);
6424 case BUILT_IN_STRNCAT
:
6425 target
= expand_builtin_strncat (exp
, target
, mode
);
6430 case BUILT_IN_STRSPN
:
6431 target
= expand_builtin_strspn (exp
, target
, mode
);
6436 case BUILT_IN_STRCSPN
:
6437 target
= expand_builtin_strcspn (exp
, target
, mode
);
6442 case BUILT_IN_STRSTR
:
6443 target
= expand_builtin_strstr (exp
, target
, mode
);
6448 case BUILT_IN_STRPBRK
:
6449 target
= expand_builtin_strpbrk (exp
, target
, mode
);
6454 case BUILT_IN_INDEX
:
6455 case BUILT_IN_STRCHR
:
6456 target
= expand_builtin_strchr (exp
, target
, mode
);
6461 case BUILT_IN_RINDEX
:
6462 case BUILT_IN_STRRCHR
:
6463 target
= expand_builtin_strrchr (exp
, target
, mode
);
6468 case BUILT_IN_MEMCPY
:
6469 target
= expand_builtin_memcpy (exp
, target
, mode
);
6474 case BUILT_IN_MEMPCPY
:
6475 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6480 case BUILT_IN_MEMMOVE
:
6481 target
= expand_builtin_memmove (exp
, target
, mode
, ignore
);
6486 case BUILT_IN_BCOPY
:
6487 target
= expand_builtin_bcopy (exp
, ignore
);
6492 case BUILT_IN_MEMSET
:
6493 target
= expand_builtin_memset (exp
, target
, mode
);
6498 case BUILT_IN_BZERO
:
6499 target
= expand_builtin_bzero (exp
);
6504 case BUILT_IN_STRCMP
:
6505 target
= expand_builtin_strcmp (exp
, target
, mode
);
6510 case BUILT_IN_STRNCMP
:
6511 target
= expand_builtin_strncmp (exp
, target
, mode
);
6516 case BUILT_IN_MEMCHR
:
6517 target
= expand_builtin_memchr (exp
, target
, mode
);
6523 case BUILT_IN_MEMCMP
:
6524 target
= expand_builtin_memcmp (exp
, target
, mode
);
6529 case BUILT_IN_SETJMP
:
6530 /* This should have been lowered to the builtins below. */
6533 case BUILT_IN_SETJMP_SETUP
:
6534 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6535 and the receiver label. */
6536 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6538 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6539 VOIDmode
, EXPAND_NORMAL
);
6540 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6541 rtx label_r
= label_rtx (label
);
6543 /* This is copied from the handling of non-local gotos. */
6544 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6545 nonlocal_goto_handler_labels
6546 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6547 nonlocal_goto_handler_labels
);
6548 /* ??? Do not let expand_label treat us as such since we would
6549 not want to be both on the list of non-local labels and on
6550 the list of forced labels. */
6551 FORCED_LABEL (label
) = 0;
6556 case BUILT_IN_SETJMP_DISPATCHER
:
6557 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6558 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6560 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6561 rtx label_r
= label_rtx (label
);
6563 /* Remove the dispatcher label from the list of non-local labels
6564 since the receiver labels have been added to it above. */
6565 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6570 case BUILT_IN_SETJMP_RECEIVER
:
6571 /* __builtin_setjmp_receiver is passed the receiver label. */
6572 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6574 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6575 rtx label_r
= label_rtx (label
);
6577 expand_builtin_setjmp_receiver (label_r
);
6582 /* __builtin_longjmp is passed a pointer to an array of five words.
6583 It's similar to the C library longjmp function but works with
6584 __builtin_setjmp above. */
6585 case BUILT_IN_LONGJMP
:
6586 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6588 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6589 VOIDmode
, EXPAND_NORMAL
);
6590 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6592 if (value
!= const1_rtx
)
6594 error ("%<__builtin_longjmp%> second argument must be 1");
6598 expand_builtin_longjmp (buf_addr
, value
);
6603 case BUILT_IN_NONLOCAL_GOTO
:
6604 target
= expand_builtin_nonlocal_goto (exp
);
6609 /* This updates the setjmp buffer that is its argument with the value
6610 of the current stack pointer. */
6611 case BUILT_IN_UPDATE_SETJMP_BUF
:
6612 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6615 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6617 expand_builtin_update_setjmp_buf (buf_addr
);
6623 expand_builtin_trap ();
6626 case BUILT_IN_PRINTF
:
6627 target
= expand_builtin_printf (exp
, target
, mode
, false);
6632 case BUILT_IN_PRINTF_UNLOCKED
:
6633 target
= expand_builtin_printf (exp
, target
, mode
, true);
6638 case BUILT_IN_FPUTS
:
6639 target
= expand_builtin_fputs (exp
, target
, false);
6643 case BUILT_IN_FPUTS_UNLOCKED
:
6644 target
= expand_builtin_fputs (exp
, target
, true);
6649 case BUILT_IN_FPRINTF
:
6650 target
= expand_builtin_fprintf (exp
, target
, mode
, false);
6655 case BUILT_IN_FPRINTF_UNLOCKED
:
6656 target
= expand_builtin_fprintf (exp
, target
, mode
, true);
6661 case BUILT_IN_SPRINTF
:
6662 target
= expand_builtin_sprintf (exp
, target
, mode
);
6667 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6668 case BUILT_IN_SIGNBITD32
:
6669 case BUILT_IN_SIGNBITD64
:
6670 case BUILT_IN_SIGNBITD128
:
6671 target
= expand_builtin_signbit (exp
, target
);
6676 /* Various hooks for the DWARF 2 __throw routine. */
6677 case BUILT_IN_UNWIND_INIT
:
6678 expand_builtin_unwind_init ();
6680 case BUILT_IN_DWARF_CFA
:
6681 return virtual_cfa_rtx
;
6682 #ifdef DWARF2_UNWIND_INFO
6683 case BUILT_IN_DWARF_SP_COLUMN
:
6684 return expand_builtin_dwarf_sp_column ();
6685 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6686 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6689 case BUILT_IN_FROB_RETURN_ADDR
:
6690 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6691 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6692 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6693 case BUILT_IN_EH_RETURN
:
6694 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6695 CALL_EXPR_ARG (exp
, 1));
6697 #ifdef EH_RETURN_DATA_REGNO
6698 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6699 return expand_builtin_eh_return_data_regno (exp
);
6701 case BUILT_IN_EXTEND_POINTER
:
6702 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6704 case BUILT_IN_VA_START
:
6705 case BUILT_IN_STDARG_START
:
6706 return expand_builtin_va_start (exp
);
6707 case BUILT_IN_VA_END
:
6708 return expand_builtin_va_end (exp
);
6709 case BUILT_IN_VA_COPY
:
6710 return expand_builtin_va_copy (exp
);
6711 case BUILT_IN_EXPECT
:
6712 return expand_builtin_expect (exp
, target
);
6713 case BUILT_IN_PREFETCH
:
6714 expand_builtin_prefetch (exp
);
6717 case BUILT_IN_PROFILE_FUNC_ENTER
:
6718 return expand_builtin_profile_func (false);
6719 case BUILT_IN_PROFILE_FUNC_EXIT
:
6720 return expand_builtin_profile_func (true);
6722 case BUILT_IN_INIT_TRAMPOLINE
:
6723 return expand_builtin_init_trampoline (exp
);
6724 case BUILT_IN_ADJUST_TRAMPOLINE
:
6725 return expand_builtin_adjust_trampoline (exp
);
6728 case BUILT_IN_EXECL
:
6729 case BUILT_IN_EXECV
:
6730 case BUILT_IN_EXECLP
:
6731 case BUILT_IN_EXECLE
:
6732 case BUILT_IN_EXECVP
:
6733 case BUILT_IN_EXECVE
:
6734 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6739 case BUILT_IN_FETCH_AND_ADD_1
:
6740 case BUILT_IN_FETCH_AND_ADD_2
:
6741 case BUILT_IN_FETCH_AND_ADD_4
:
6742 case BUILT_IN_FETCH_AND_ADD_8
:
6743 case BUILT_IN_FETCH_AND_ADD_16
:
6744 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6745 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6746 false, target
, ignore
);
6751 case BUILT_IN_FETCH_AND_SUB_1
:
6752 case BUILT_IN_FETCH_AND_SUB_2
:
6753 case BUILT_IN_FETCH_AND_SUB_4
:
6754 case BUILT_IN_FETCH_AND_SUB_8
:
6755 case BUILT_IN_FETCH_AND_SUB_16
:
6756 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6757 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6758 false, target
, ignore
);
6763 case BUILT_IN_FETCH_AND_OR_1
:
6764 case BUILT_IN_FETCH_AND_OR_2
:
6765 case BUILT_IN_FETCH_AND_OR_4
:
6766 case BUILT_IN_FETCH_AND_OR_8
:
6767 case BUILT_IN_FETCH_AND_OR_16
:
6768 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6769 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6770 false, target
, ignore
);
6775 case BUILT_IN_FETCH_AND_AND_1
:
6776 case BUILT_IN_FETCH_AND_AND_2
:
6777 case BUILT_IN_FETCH_AND_AND_4
:
6778 case BUILT_IN_FETCH_AND_AND_8
:
6779 case BUILT_IN_FETCH_AND_AND_16
:
6780 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6781 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6782 false, target
, ignore
);
6787 case BUILT_IN_FETCH_AND_XOR_1
:
6788 case BUILT_IN_FETCH_AND_XOR_2
:
6789 case BUILT_IN_FETCH_AND_XOR_4
:
6790 case BUILT_IN_FETCH_AND_XOR_8
:
6791 case BUILT_IN_FETCH_AND_XOR_16
:
6792 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6793 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6794 false, target
, ignore
);
6799 case BUILT_IN_FETCH_AND_NAND_1
:
6800 case BUILT_IN_FETCH_AND_NAND_2
:
6801 case BUILT_IN_FETCH_AND_NAND_4
:
6802 case BUILT_IN_FETCH_AND_NAND_8
:
6803 case BUILT_IN_FETCH_AND_NAND_16
:
6804 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6805 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6806 false, target
, ignore
);
6811 case BUILT_IN_ADD_AND_FETCH_1
:
6812 case BUILT_IN_ADD_AND_FETCH_2
:
6813 case BUILT_IN_ADD_AND_FETCH_4
:
6814 case BUILT_IN_ADD_AND_FETCH_8
:
6815 case BUILT_IN_ADD_AND_FETCH_16
:
6816 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6817 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6818 true, target
, ignore
);
6823 case BUILT_IN_SUB_AND_FETCH_1
:
6824 case BUILT_IN_SUB_AND_FETCH_2
:
6825 case BUILT_IN_SUB_AND_FETCH_4
:
6826 case BUILT_IN_SUB_AND_FETCH_8
:
6827 case BUILT_IN_SUB_AND_FETCH_16
:
6828 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6829 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6830 true, target
, ignore
);
6835 case BUILT_IN_OR_AND_FETCH_1
:
6836 case BUILT_IN_OR_AND_FETCH_2
:
6837 case BUILT_IN_OR_AND_FETCH_4
:
6838 case BUILT_IN_OR_AND_FETCH_8
:
6839 case BUILT_IN_OR_AND_FETCH_16
:
6840 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6841 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6842 true, target
, ignore
);
6847 case BUILT_IN_AND_AND_FETCH_1
:
6848 case BUILT_IN_AND_AND_FETCH_2
:
6849 case BUILT_IN_AND_AND_FETCH_4
:
6850 case BUILT_IN_AND_AND_FETCH_8
:
6851 case BUILT_IN_AND_AND_FETCH_16
:
6852 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6853 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6854 true, target
, ignore
);
6859 case BUILT_IN_XOR_AND_FETCH_1
:
6860 case BUILT_IN_XOR_AND_FETCH_2
:
6861 case BUILT_IN_XOR_AND_FETCH_4
:
6862 case BUILT_IN_XOR_AND_FETCH_8
:
6863 case BUILT_IN_XOR_AND_FETCH_16
:
6864 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
6865 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6866 true, target
, ignore
);
6871 case BUILT_IN_NAND_AND_FETCH_1
:
6872 case BUILT_IN_NAND_AND_FETCH_2
:
6873 case BUILT_IN_NAND_AND_FETCH_4
:
6874 case BUILT_IN_NAND_AND_FETCH_8
:
6875 case BUILT_IN_NAND_AND_FETCH_16
:
6876 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
6877 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6878 true, target
, ignore
);
6883 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
6884 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
6885 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
6886 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
6887 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
6888 if (mode
== VOIDmode
)
6889 mode
= TYPE_MODE (boolean_type_node
);
6890 if (!target
|| !register_operand (target
, mode
))
6891 target
= gen_reg_rtx (mode
);
6893 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
6894 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6899 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
6900 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
6901 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
6902 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
6903 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
6904 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
6905 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6910 case BUILT_IN_LOCK_TEST_AND_SET_1
:
6911 case BUILT_IN_LOCK_TEST_AND_SET_2
:
6912 case BUILT_IN_LOCK_TEST_AND_SET_4
:
6913 case BUILT_IN_LOCK_TEST_AND_SET_8
:
6914 case BUILT_IN_LOCK_TEST_AND_SET_16
:
6915 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
6916 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
6921 case BUILT_IN_LOCK_RELEASE_1
:
6922 case BUILT_IN_LOCK_RELEASE_2
:
6923 case BUILT_IN_LOCK_RELEASE_4
:
6924 case BUILT_IN_LOCK_RELEASE_8
:
6925 case BUILT_IN_LOCK_RELEASE_16
:
6926 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
6927 expand_builtin_lock_release (mode
, exp
);
6930 case BUILT_IN_SYNCHRONIZE
:
6931 expand_builtin_synchronize ();
6934 case BUILT_IN_OBJECT_SIZE
:
6935 return expand_builtin_object_size (exp
);
6937 case BUILT_IN_MEMCPY_CHK
:
6938 case BUILT_IN_MEMPCPY_CHK
:
6939 case BUILT_IN_MEMMOVE_CHK
:
6940 case BUILT_IN_MEMSET_CHK
:
6941 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6946 case BUILT_IN_STRCPY_CHK
:
6947 case BUILT_IN_STPCPY_CHK
:
6948 case BUILT_IN_STRNCPY_CHK
:
6949 case BUILT_IN_STRCAT_CHK
:
6950 case BUILT_IN_STRNCAT_CHK
:
6951 case BUILT_IN_SNPRINTF_CHK
:
6952 case BUILT_IN_VSNPRINTF_CHK
:
6953 maybe_emit_chk_warning (exp
, fcode
);
6956 case BUILT_IN_SPRINTF_CHK
:
6957 case BUILT_IN_VSPRINTF_CHK
:
6958 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6961 default: /* just do library call, if unknown builtin */
6965 /* The switch statement above can drop through to cause the function
6966 to be called normally. */
6967 return expand_call (exp
, target
, ignore
);
6970 /* Determine whether a tree node represents a call to a built-in
6971 function. If the tree T is a call to a built-in function with
6972 the right number of arguments of the appropriate types, return
6973 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6974 Otherwise the return value is END_BUILTINS. */
6976 enum built_in_function
6977 builtin_mathfn_code (const_tree t
)
6979 const_tree fndecl
, arg
, parmlist
;
6980 const_tree argtype
, parmtype
;
6981 const_call_expr_arg_iterator iter
;
6983 if (TREE_CODE (t
) != CALL_EXPR
6984 || TREE_CODE (CALL_EXPR_FN (t
)) != ADDR_EXPR
)
6985 return END_BUILTINS
;
6987 fndecl
= get_callee_fndecl (t
);
6988 if (fndecl
== NULL_TREE
6989 || TREE_CODE (fndecl
) != FUNCTION_DECL
6990 || ! DECL_BUILT_IN (fndecl
)
6991 || DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
6992 return END_BUILTINS
;
6994 parmlist
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
6995 init_const_call_expr_arg_iterator (t
, &iter
);
6996 for (; parmlist
; parmlist
= TREE_CHAIN (parmlist
))
6998 /* If a function doesn't take a variable number of arguments,
6999 the last element in the list will have type `void'. */
7000 parmtype
= TREE_VALUE (parmlist
);
7001 if (VOID_TYPE_P (parmtype
))
7003 if (more_const_call_expr_args_p (&iter
))
7004 return END_BUILTINS
;
7005 return DECL_FUNCTION_CODE (fndecl
);
7008 if (! more_const_call_expr_args_p (&iter
))
7009 return END_BUILTINS
;
7011 arg
= next_const_call_expr_arg (&iter
);
7012 argtype
= TREE_TYPE (arg
);
7014 if (SCALAR_FLOAT_TYPE_P (parmtype
))
7016 if (! SCALAR_FLOAT_TYPE_P (argtype
))
7017 return END_BUILTINS
;
7019 else if (COMPLEX_FLOAT_TYPE_P (parmtype
))
7021 if (! COMPLEX_FLOAT_TYPE_P (argtype
))
7022 return END_BUILTINS
;
7024 else if (POINTER_TYPE_P (parmtype
))
7026 if (! POINTER_TYPE_P (argtype
))
7027 return END_BUILTINS
;
7029 else if (INTEGRAL_TYPE_P (parmtype
))
7031 if (! INTEGRAL_TYPE_P (argtype
))
7032 return END_BUILTINS
;
7035 return END_BUILTINS
;
7038 /* Variable-length argument list. */
7039 return DECL_FUNCTION_CODE (fndecl
);
7042 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7043 evaluate to a constant. */
7046 fold_builtin_constant_p (tree arg
)
7048 /* We return 1 for a numeric type that's known to be a constant
7049 value at compile-time or for an aggregate type that's a
7050 literal constant. */
7053 /* If we know this is a constant, emit the constant of one. */
7054 if (CONSTANT_CLASS_P (arg
)
7055 || (TREE_CODE (arg
) == CONSTRUCTOR
7056 && TREE_CONSTANT (arg
)))
7057 return integer_one_node
;
7058 if (TREE_CODE (arg
) == ADDR_EXPR
)
7060 tree op
= TREE_OPERAND (arg
, 0);
7061 if (TREE_CODE (op
) == STRING_CST
7062 || (TREE_CODE (op
) == ARRAY_REF
7063 && integer_zerop (TREE_OPERAND (op
, 1))
7064 && TREE_CODE (TREE_OPERAND (op
, 0)) == STRING_CST
))
7065 return integer_one_node
;
7068 /* If this expression has side effects, show we don't know it to be a
7069 constant. Likewise if it's a pointer or aggregate type since in
7070 those case we only want literals, since those are only optimized
7071 when generating RTL, not later.
7072 And finally, if we are compiling an initializer, not code, we
7073 need to return a definite result now; there's not going to be any
7074 more optimization done. */
7075 if (TREE_SIDE_EFFECTS (arg
)
7076 || AGGREGATE_TYPE_P (TREE_TYPE (arg
))
7077 || POINTER_TYPE_P (TREE_TYPE (arg
))
7079 || folding_initializer
)
7080 return integer_zero_node
;
7085 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7086 return it as a truthvalue. */
7089 build_builtin_expect_predicate (tree pred
, tree expected
)
7091 tree fn
, arg_types
, pred_type
, expected_type
, call_expr
, ret_type
;
7093 fn
= built_in_decls
[BUILT_IN_EXPECT
];
7094 arg_types
= TYPE_ARG_TYPES (TREE_TYPE (fn
));
7095 ret_type
= TREE_TYPE (TREE_TYPE (fn
));
7096 pred_type
= TREE_VALUE (arg_types
);
7097 expected_type
= TREE_VALUE (TREE_CHAIN (arg_types
));
7099 pred
= fold_convert (pred_type
, pred
);
7100 expected
= fold_convert (expected_type
, expected
);
7101 call_expr
= build_call_expr (fn
, 2, pred
, expected
);
7103 return build2 (NE_EXPR
, TREE_TYPE (pred
), call_expr
,
7104 build_int_cst (ret_type
, 0));
7107 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7108 NULL_TREE if no simplification is possible. */
7111 fold_builtin_expect (tree arg0
, tree arg1
)
7114 enum tree_code code
;
7116 /* If this is a builtin_expect within a builtin_expect keep the
7117 inner one. See through a comparison against a constant. It
7118 might have been added to create a thruthvalue. */
7120 if (COMPARISON_CLASS_P (inner
)
7121 && TREE_CODE (TREE_OPERAND (inner
, 1)) == INTEGER_CST
)
7122 inner
= TREE_OPERAND (inner
, 0);
7124 if (TREE_CODE (inner
) == CALL_EXPR
7125 && (fndecl
= get_callee_fndecl (inner
))
7126 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
7127 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_EXPECT
)
7130 /* Distribute the expected value over short-circuiting operators.
7131 See through the cast from truthvalue_type_node to long. */
7133 while (TREE_CODE (inner
) == NOP_EXPR
7134 && INTEGRAL_TYPE_P (TREE_TYPE (inner
))
7135 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner
, 0))))
7136 inner
= TREE_OPERAND (inner
, 0);
7138 code
= TREE_CODE (inner
);
7139 if (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
)
7141 tree op0
= TREE_OPERAND (inner
, 0);
7142 tree op1
= TREE_OPERAND (inner
, 1);
7144 op0
= build_builtin_expect_predicate (op0
, arg1
);
7145 op1
= build_builtin_expect_predicate (op1
, arg1
);
7146 inner
= build2 (code
, TREE_TYPE (inner
), op0
, op1
);
7148 return fold_convert (TREE_TYPE (arg0
), inner
);
7151 /* If the argument isn't invariant then there's nothing else we can do. */
7152 if (!TREE_INVARIANT (arg0
))
7155 /* If we expect that a comparison against the argument will fold to
7156 a constant return the constant. In practice, this means a true
7157 constant or the address of a non-weak symbol. */
7160 if (TREE_CODE (inner
) == ADDR_EXPR
)
7164 inner
= TREE_OPERAND (inner
, 0);
7166 while (TREE_CODE (inner
) == COMPONENT_REF
7167 || TREE_CODE (inner
) == ARRAY_REF
);
7168 if (DECL_P (inner
) && DECL_WEAK (inner
))
7172 /* Otherwise, ARG0 already has the proper type for the return value. */
7176 /* Fold a call to __builtin_classify_type with argument ARG. */
7179 fold_builtin_classify_type (tree arg
)
7182 return build_int_cst (NULL_TREE
, no_type_class
);
7184 return build_int_cst (NULL_TREE
, type_to_class (TREE_TYPE (arg
)));
7187 /* Fold a call to __builtin_strlen with argument ARG. */
7190 fold_builtin_strlen (tree arg
)
7192 if (!validate_arg (arg
, POINTER_TYPE
))
7196 tree len
= c_strlen (arg
, 0);
7200 /* Convert from the internal "sizetype" type to "size_t". */
7202 len
= fold_convert (size_type_node
, len
);
7210 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7213 fold_builtin_inf (tree type
, int warn
)
7215 REAL_VALUE_TYPE real
;
7217 /* __builtin_inff is intended to be usable to define INFINITY on all
7218 targets. If an infinity is not available, INFINITY expands "to a
7219 positive constant of type float that overflows at translation
7220 time", footnote "In this case, using INFINITY will violate the
7221 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7222 Thus we pedwarn to ensure this constraint violation is
7224 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
7225 pedwarn ("target format does not support infinity");
7228 return build_real (type
, real
);
7231 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7234 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7236 REAL_VALUE_TYPE real
;
7239 if (!validate_arg (arg
, POINTER_TYPE
))
7241 str
= c_getstr (arg
);
7245 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7248 return build_real (type
, real
);
7251 /* Return true if the floating point expression T has an integer value.
7252 We also allow +Inf, -Inf and NaN to be considered integer values. */
7255 integer_valued_real_p (tree t
)
7257 switch (TREE_CODE (t
))
7264 case NON_LVALUE_EXPR
:
7265 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7270 return integer_valued_real_p (GENERIC_TREE_OPERAND (t
, 1));
7277 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7278 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7281 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7282 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7285 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7289 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7290 if (TREE_CODE (type
) == INTEGER_TYPE
)
7292 if (TREE_CODE (type
) == REAL_TYPE
)
7293 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7298 switch (builtin_mathfn_code (t
))
7300 CASE_FLT_FN (BUILT_IN_CEIL
):
7301 CASE_FLT_FN (BUILT_IN_FLOOR
):
7302 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7303 CASE_FLT_FN (BUILT_IN_RINT
):
7304 CASE_FLT_FN (BUILT_IN_ROUND
):
7305 CASE_FLT_FN (BUILT_IN_TRUNC
):
7308 CASE_FLT_FN (BUILT_IN_FMIN
):
7309 CASE_FLT_FN (BUILT_IN_FMAX
):
7310 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7311 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7324 /* FNDECL is assumed to be a builtin where truncation can be propagated
7325 across (for instance floor((double)f) == (double)floorf (f).
7326 Do the transformation for a call with argument ARG. */
7329 fold_trunc_transparent_mathfn (tree fndecl
, tree arg
)
7331 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7333 if (!validate_arg (arg
, REAL_TYPE
))
7336 /* Integer rounding functions are idempotent. */
7337 if (fcode
== builtin_mathfn_code (arg
))
7340 /* If argument is already integer valued, and we don't need to worry
7341 about setting errno, there's no need to perform rounding. */
7342 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7347 tree arg0
= strip_float_extensions (arg
);
7348 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7349 tree newtype
= TREE_TYPE (arg0
);
7352 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7353 && (decl
= mathfn_built_in (newtype
, fcode
)))
7354 return fold_convert (ftype
,
7355 build_call_expr (decl
, 1,
7356 fold_convert (newtype
, arg0
)));
7361 /* FNDECL is assumed to be builtin which can narrow the FP type of
7362 the argument, for instance lround((double)f) -> lroundf (f).
7363 Do the transformation for a call with argument ARG. */
7366 fold_fixed_mathfn (tree fndecl
, tree arg
)
7368 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7370 if (!validate_arg (arg
, REAL_TYPE
))
7373 /* If argument is already integer valued, and we don't need to worry
7374 about setting errno, there's no need to perform rounding. */
7375 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7376 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7380 tree ftype
= TREE_TYPE (arg
);
7381 tree arg0
= strip_float_extensions (arg
);
7382 tree newtype
= TREE_TYPE (arg0
);
7385 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7386 && (decl
= mathfn_built_in (newtype
, fcode
)))
7387 return build_call_expr (decl
, 1, fold_convert (newtype
, arg0
));
7390 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7391 sizeof (long long) == sizeof (long). */
7392 if (TYPE_PRECISION (long_long_integer_type_node
)
7393 == TYPE_PRECISION (long_integer_type_node
))
7395 tree newfn
= NULL_TREE
;
7398 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7399 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7402 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7403 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7406 CASE_FLT_FN (BUILT_IN_LLROUND
):
7407 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7410 CASE_FLT_FN (BUILT_IN_LLRINT
):
7411 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7420 tree newcall
= build_call_expr(newfn
, 1, arg
);
7421 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7428 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7429 return type. Return NULL_TREE if no simplification can be made. */
7432 fold_builtin_cabs (tree arg
, tree type
, tree fndecl
)
7436 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
7437 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7440 /* Calculate the result when the argument is a constant. */
7441 if (TREE_CODE (arg
) == COMPLEX_CST
7442 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7446 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7448 tree real
= TREE_OPERAND (arg
, 0);
7449 tree imag
= TREE_OPERAND (arg
, 1);
7451 /* If either part is zero, cabs is fabs of the other. */
7452 if (real_zerop (real
))
7453 return fold_build1 (ABS_EXPR
, type
, imag
);
7454 if (real_zerop (imag
))
7455 return fold_build1 (ABS_EXPR
, type
, real
);
7457 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7458 if (flag_unsafe_math_optimizations
7459 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7461 const REAL_VALUE_TYPE sqrt2_trunc
7462 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
7464 return fold_build2 (MULT_EXPR
, type
,
7465 fold_build1 (ABS_EXPR
, type
, real
),
7466 build_real (type
, sqrt2_trunc
));
7470 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7471 if (TREE_CODE (arg
) == NEGATE_EXPR
7472 || TREE_CODE (arg
) == CONJ_EXPR
)
7473 return build_call_expr (fndecl
, 1, TREE_OPERAND (arg
, 0));
7475 /* Don't do this when optimizing for size. */
7476 if (flag_unsafe_math_optimizations
7477 && optimize
&& !optimize_size
)
7479 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7481 if (sqrtfn
!= NULL_TREE
)
7483 tree rpart
, ipart
, result
;
7485 arg
= builtin_save_expr (arg
);
7487 rpart
= fold_build1 (REALPART_EXPR
, type
, arg
);
7488 ipart
= fold_build1 (IMAGPART_EXPR
, type
, arg
);
7490 rpart
= builtin_save_expr (rpart
);
7491 ipart
= builtin_save_expr (ipart
);
7493 result
= fold_build2 (PLUS_EXPR
, type
,
7494 fold_build2 (MULT_EXPR
, type
,
7496 fold_build2 (MULT_EXPR
, type
,
7499 return build_call_expr (sqrtfn
, 1, result
);
7506 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7507 Return NULL_TREE if no simplification can be made. */
7510 fold_builtin_sqrt (tree arg
, tree type
)
7513 enum built_in_function fcode
;
7516 if (!validate_arg (arg
, REAL_TYPE
))
7519 /* Calculate the result when the argument is a constant. */
7520 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7523 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7524 fcode
= builtin_mathfn_code (arg
);
7525 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7527 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7528 arg
= fold_build2 (MULT_EXPR
, type
,
7529 CALL_EXPR_ARG (arg
, 0),
7530 build_real (type
, dconsthalf
));
7531 return build_call_expr (expfn
, 1, arg
);
7534 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7535 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7537 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7541 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7543 /* The inner root was either sqrt or cbrt. */
7544 REAL_VALUE_TYPE dconstroot
=
7545 BUILTIN_SQRT_P (fcode
) ? dconsthalf
: dconstthird
;
7547 /* Adjust for the outer root. */
7548 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7549 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7550 tree_root
= build_real (type
, dconstroot
);
7551 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7555 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7556 if (flag_unsafe_math_optimizations
7557 && (fcode
== BUILT_IN_POW
7558 || fcode
== BUILT_IN_POWF
7559 || fcode
== BUILT_IN_POWL
))
7561 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7562 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7563 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7565 if (!tree_expr_nonnegative_p (arg0
))
7566 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7567 narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
7568 build_real (type
, dconsthalf
));
7569 return build_call_expr (powfn
, 2, arg0
, narg1
);
7575 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7576 Return NULL_TREE if no simplification can be made. */
7579 fold_builtin_cbrt (tree arg
, tree type
)
7581 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7584 if (!validate_arg (arg
, REAL_TYPE
))
7587 /* Calculate the result when the argument is a constant. */
7588 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7591 if (flag_unsafe_math_optimizations
)
7593 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7594 if (BUILTIN_EXPONENT_P (fcode
))
7596 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7597 const REAL_VALUE_TYPE third_trunc
=
7598 real_value_truncate (TYPE_MODE (type
), dconstthird
);
7599 arg
= fold_build2 (MULT_EXPR
, type
,
7600 CALL_EXPR_ARG (arg
, 0),
7601 build_real (type
, third_trunc
));
7602 return build_call_expr (expfn
, 1, arg
);
7605 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7606 if (BUILTIN_SQRT_P (fcode
))
7608 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7612 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7614 REAL_VALUE_TYPE dconstroot
= dconstthird
;
7616 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7617 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7618 tree_root
= build_real (type
, dconstroot
);
7619 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7623 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7624 if (BUILTIN_CBRT_P (fcode
))
7626 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7627 if (tree_expr_nonnegative_p (arg0
))
7629 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7634 REAL_VALUE_TYPE dconstroot
;
7636 real_arithmetic (&dconstroot
, MULT_EXPR
, &dconstthird
, &dconstthird
);
7637 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7638 tree_root
= build_real (type
, dconstroot
);
7639 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7644 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7645 if (fcode
== BUILT_IN_POW
7646 || fcode
== BUILT_IN_POWF
7647 || fcode
== BUILT_IN_POWL
)
7649 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7650 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7651 if (tree_expr_nonnegative_p (arg00
))
7653 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7654 const REAL_VALUE_TYPE dconstroot
7655 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
7656 tree narg01
= fold_build2 (MULT_EXPR
, type
, arg01
,
7657 build_real (type
, dconstroot
));
7658 return build_call_expr (powfn
, 2, arg00
, narg01
);
7665 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7666 TYPE is the type of the return value. Return NULL_TREE if no
7667 simplification can be made. */
7670 fold_builtin_cos (tree arg
, tree type
, tree fndecl
)
7674 if (!validate_arg (arg
, REAL_TYPE
))
7677 /* Calculate the result when the argument is a constant. */
7678 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7681 /* Optimize cos(-x) into cos (x). */
7682 if ((narg
= fold_strip_sign_ops (arg
)))
7683 return build_call_expr (fndecl
, 1, narg
);
7688 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7689 Return NULL_TREE if no simplification can be made. */
7692 fold_builtin_cosh (tree arg
, tree type
, tree fndecl
)
7694 if (validate_arg (arg
, REAL_TYPE
))
7698 /* Calculate the result when the argument is a constant. */
7699 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7702 /* Optimize cosh(-x) into cosh (x). */
7703 if ((narg
= fold_strip_sign_ops (arg
)))
7704 return build_call_expr (fndecl
, 1, narg
);
7710 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7711 Return NULL_TREE if no simplification can be made. */
7714 fold_builtin_tan (tree arg
, tree type
)
7716 enum built_in_function fcode
;
7719 if (!validate_arg (arg
, REAL_TYPE
))
7722 /* Calculate the result when the argument is a constant. */
7723 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7726 /* Optimize tan(atan(x)) = x. */
7727 fcode
= builtin_mathfn_code (arg
);
7728 if (flag_unsafe_math_optimizations
7729 && (fcode
== BUILT_IN_ATAN
7730 || fcode
== BUILT_IN_ATANF
7731 || fcode
== BUILT_IN_ATANL
))
7732 return CALL_EXPR_ARG (arg
, 0);
7737 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7738 NULL_TREE if no simplification can be made. */
7741 fold_builtin_sincos (tree arg0
, tree arg1
, tree arg2
)
7746 if (!validate_arg (arg0
, REAL_TYPE
)
7747 || !validate_arg (arg1
, POINTER_TYPE
)
7748 || !validate_arg (arg2
, POINTER_TYPE
))
7751 type
= TREE_TYPE (arg0
);
7753 /* Calculate the result when the argument is a constant. */
7754 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7757 /* Canonicalize sincos to cexpi. */
7758 if (!TARGET_C99_FUNCTIONS
)
7760 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7764 call
= build_call_expr (fn
, 1, arg0
);
7765 call
= builtin_save_expr (call
);
7767 return build2 (COMPOUND_EXPR
, type
,
7768 build2 (MODIFY_EXPR
, void_type_node
,
7769 build_fold_indirect_ref (arg1
),
7770 build1 (IMAGPART_EXPR
, type
, call
)),
7771 build2 (MODIFY_EXPR
, void_type_node
,
7772 build_fold_indirect_ref (arg2
),
7773 build1 (REALPART_EXPR
, type
, call
)));
7776 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7777 NULL_TREE if no simplification can be made. */
7780 fold_builtin_cexp (tree arg0
, tree type
)
7783 tree realp
, imagp
, ifn
;
7785 if (!validate_arg (arg0
, COMPLEX_TYPE
))
7788 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7790 /* In case we can figure out the real part of arg0 and it is constant zero
7792 if (!TARGET_C99_FUNCTIONS
)
7794 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7798 if ((realp
= fold_unary (REALPART_EXPR
, rtype
, arg0
))
7799 && real_zerop (realp
))
7801 tree narg
= fold_build1 (IMAGPART_EXPR
, rtype
, arg0
);
7802 return build_call_expr (ifn
, 1, narg
);
7805 /* In case we can easily decompose real and imaginary parts split cexp
7806 to exp (r) * cexpi (i). */
7807 if (flag_unsafe_math_optimizations
7810 tree rfn
, rcall
, icall
;
7812 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7816 imagp
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
7820 icall
= build_call_expr (ifn
, 1, imagp
);
7821 icall
= builtin_save_expr (icall
);
7822 rcall
= build_call_expr (rfn
, 1, realp
);
7823 rcall
= builtin_save_expr (rcall
);
7824 return fold_build2 (COMPLEX_EXPR
, type
,
7825 fold_build2 (MULT_EXPR
, rtype
,
7827 fold_build1 (REALPART_EXPR
, rtype
, icall
)),
7828 fold_build2 (MULT_EXPR
, rtype
,
7830 fold_build1 (IMAGPART_EXPR
, rtype
, icall
)));
7836 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7837 Return NULL_TREE if no simplification can be made. */
7840 fold_builtin_trunc (tree fndecl
, tree arg
)
7842 if (!validate_arg (arg
, REAL_TYPE
))
7845 /* Optimize trunc of constant value. */
7846 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7848 REAL_VALUE_TYPE r
, x
;
7849 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7851 x
= TREE_REAL_CST (arg
);
7852 real_trunc (&r
, TYPE_MODE (type
), &x
);
7853 return build_real (type
, r
);
7856 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7859 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7860 Return NULL_TREE if no simplification can be made. */
7863 fold_builtin_floor (tree fndecl
, tree arg
)
7865 if (!validate_arg (arg
, REAL_TYPE
))
7868 /* Optimize floor of constant value. */
7869 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7873 x
= TREE_REAL_CST (arg
);
7874 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7876 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7879 real_floor (&r
, TYPE_MODE (type
), &x
);
7880 return build_real (type
, r
);
7884 /* Fold floor (x) where x is nonnegative to trunc (x). */
7885 if (tree_expr_nonnegative_p (arg
))
7887 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7889 return build_call_expr (truncfn
, 1, arg
);
7892 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7895 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7896 Return NULL_TREE if no simplification can be made. */
7899 fold_builtin_ceil (tree fndecl
, tree arg
)
7901 if (!validate_arg (arg
, REAL_TYPE
))
7904 /* Optimize ceil of constant value. */
7905 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7909 x
= TREE_REAL_CST (arg
);
7910 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7912 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7915 real_ceil (&r
, TYPE_MODE (type
), &x
);
7916 return build_real (type
, r
);
7920 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7923 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7924 Return NULL_TREE if no simplification can be made. */
7927 fold_builtin_round (tree fndecl
, tree arg
)
7929 if (!validate_arg (arg
, REAL_TYPE
))
7932 /* Optimize round of constant value. */
7933 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7937 x
= TREE_REAL_CST (arg
);
7938 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7940 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7943 real_round (&r
, TYPE_MODE (type
), &x
);
7944 return build_real (type
, r
);
7948 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7951 /* Fold function call to builtin lround, lroundf or lroundl (or the
7952 corresponding long long versions) and other rounding functions. ARG
7953 is the argument to the call. Return NULL_TREE if no simplification
7957 fold_builtin_int_roundingfn (tree fndecl
, tree arg
)
7959 if (!validate_arg (arg
, REAL_TYPE
))
7962 /* Optimize lround of constant value. */
7963 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7965 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7967 if (real_isfinite (&x
))
7969 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7970 tree ftype
= TREE_TYPE (arg
);
7971 unsigned HOST_WIDE_INT lo2
;
7972 HOST_WIDE_INT hi
, lo
;
7975 switch (DECL_FUNCTION_CODE (fndecl
))
7977 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7978 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7979 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7982 CASE_FLT_FN (BUILT_IN_LCEIL
):
7983 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7984 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7987 CASE_FLT_FN (BUILT_IN_LROUND
):
7988 CASE_FLT_FN (BUILT_IN_LLROUND
):
7989 real_round (&r
, TYPE_MODE (ftype
), &x
);
7996 REAL_VALUE_TO_INT (&lo
, &hi
, r
);
7997 if (!fit_double_type (lo
, hi
, &lo2
, &hi
, itype
))
7998 return build_int_cst_wide (itype
, lo2
, hi
);
8002 switch (DECL_FUNCTION_CODE (fndecl
))
8004 CASE_FLT_FN (BUILT_IN_LFLOOR
):
8005 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
8006 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8007 if (tree_expr_nonnegative_p (arg
))
8008 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)),
8014 return fold_fixed_mathfn (fndecl
, arg
);
8017 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8018 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8019 the argument to the call. Return NULL_TREE if no simplification can
8023 fold_builtin_bitop (tree fndecl
, tree arg
)
8025 if (!validate_arg (arg
, INTEGER_TYPE
))
8028 /* Optimize for constant argument. */
8029 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8031 HOST_WIDE_INT hi
, width
, result
;
8032 unsigned HOST_WIDE_INT lo
;
8035 type
= TREE_TYPE (arg
);
8036 width
= TYPE_PRECISION (type
);
8037 lo
= TREE_INT_CST_LOW (arg
);
8039 /* Clear all the bits that are beyond the type's precision. */
8040 if (width
> HOST_BITS_PER_WIDE_INT
)
8042 hi
= TREE_INT_CST_HIGH (arg
);
8043 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
8044 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
8049 if (width
< HOST_BITS_PER_WIDE_INT
)
8050 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
8053 switch (DECL_FUNCTION_CODE (fndecl
))
8055 CASE_INT_FN (BUILT_IN_FFS
):
8057 result
= exact_log2 (lo
& -lo
) + 1;
8059 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
) + 1;
8064 CASE_INT_FN (BUILT_IN_CLZ
):
8066 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
8068 result
= width
- floor_log2 (lo
) - 1;
8069 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8073 CASE_INT_FN (BUILT_IN_CTZ
):
8075 result
= exact_log2 (lo
& -lo
);
8077 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
);
8078 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
8082 CASE_INT_FN (BUILT_IN_POPCOUNT
):
8085 result
++, lo
&= lo
- 1;
8087 result
++, hi
&= hi
- 1;
8090 CASE_INT_FN (BUILT_IN_PARITY
):
8093 result
++, lo
&= lo
- 1;
8095 result
++, hi
&= hi
- 1;
8103 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
8109 /* Fold function call to builtin_bswap and the long and long long
8110 variants. Return NULL_TREE if no simplification can be made. */
8112 fold_builtin_bswap (tree fndecl
, tree arg
)
8114 if (! validate_arg (arg
, INTEGER_TYPE
))
8117 /* Optimize constant value. */
8118 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
8120 HOST_WIDE_INT hi
, width
, r_hi
= 0;
8121 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
8124 type
= TREE_TYPE (arg
);
8125 width
= TYPE_PRECISION (type
);
8126 lo
= TREE_INT_CST_LOW (arg
);
8127 hi
= TREE_INT_CST_HIGH (arg
);
8129 switch (DECL_FUNCTION_CODE (fndecl
))
8131 case BUILT_IN_BSWAP32
:
8132 case BUILT_IN_BSWAP64
:
8136 for (s
= 0; s
< width
; s
+= 8)
8138 int d
= width
- s
- 8;
8139 unsigned HOST_WIDE_INT byte
;
8141 if (s
< HOST_BITS_PER_WIDE_INT
)
8142 byte
= (lo
>> s
) & 0xff;
8144 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
8146 if (d
< HOST_BITS_PER_WIDE_INT
)
8149 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
8159 if (width
< HOST_BITS_PER_WIDE_INT
)
8160 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
8162 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
8168 /* Return true if EXPR is the real constant contained in VALUE. */
8171 real_dconstp (tree expr
, const REAL_VALUE_TYPE
*value
)
8175 return ((TREE_CODE (expr
) == REAL_CST
8176 && !TREE_OVERFLOW (expr
)
8177 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), *value
))
8178 || (TREE_CODE (expr
) == COMPLEX_CST
8179 && real_dconstp (TREE_REALPART (expr
), value
)
8180 && real_zerop (TREE_IMAGPART (expr
))));
8183 /* A subroutine of fold_builtin to fold the various logarithmic
8184 functions. Return NULL_TREE if no simplification can me made.
8185 FUNC is the corresponding MPFR logarithm function. */
8188 fold_builtin_logarithm (tree fndecl
, tree arg
,
8189 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8191 if (validate_arg (arg
, REAL_TYPE
))
8193 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8195 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8197 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
8198 instead we'll look for 'e' truncated to MODE. So only do
8199 this if flag_unsafe_math_optimizations is set. */
8200 if (flag_unsafe_math_optimizations
&& func
== mpfr_log
)
8202 const REAL_VALUE_TYPE e_truncated
=
8203 real_value_truncate (TYPE_MODE (type
), dconste
);
8204 if (real_dconstp (arg
, &e_truncated
))
8205 return build_real (type
, dconst1
);
8208 /* Calculate the result when the argument is a constant. */
8209 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
8212 /* Special case, optimize logN(expN(x)) = x. */
8213 if (flag_unsafe_math_optimizations
8214 && ((func
== mpfr_log
8215 && (fcode
== BUILT_IN_EXP
8216 || fcode
== BUILT_IN_EXPF
8217 || fcode
== BUILT_IN_EXPL
))
8218 || (func
== mpfr_log2
8219 && (fcode
== BUILT_IN_EXP2
8220 || fcode
== BUILT_IN_EXP2F
8221 || fcode
== BUILT_IN_EXP2L
))
8222 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
8223 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8225 /* Optimize logN(func()) for various exponential functions. We
8226 want to determine the value "x" and the power "exponent" in
8227 order to transform logN(x**exponent) into exponent*logN(x). */
8228 if (flag_unsafe_math_optimizations
)
8230 tree exponent
= 0, x
= 0;
8234 CASE_FLT_FN (BUILT_IN_EXP
):
8235 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
8236 x
= build_real (type
,
8237 real_value_truncate (TYPE_MODE (type
), dconste
));
8238 exponent
= CALL_EXPR_ARG (arg
, 0);
8240 CASE_FLT_FN (BUILT_IN_EXP2
):
8241 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
8242 x
= build_real (type
, dconst2
);
8243 exponent
= CALL_EXPR_ARG (arg
, 0);
8245 CASE_FLT_FN (BUILT_IN_EXP10
):
8246 CASE_FLT_FN (BUILT_IN_POW10
):
8247 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
8248 x
= build_real (type
, dconst10
);
8249 exponent
= CALL_EXPR_ARG (arg
, 0);
8251 CASE_FLT_FN (BUILT_IN_SQRT
):
8252 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
8253 x
= CALL_EXPR_ARG (arg
, 0);
8254 exponent
= build_real (type
, dconsthalf
);
8256 CASE_FLT_FN (BUILT_IN_CBRT
):
8257 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
8258 x
= CALL_EXPR_ARG (arg
, 0);
8259 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
8262 CASE_FLT_FN (BUILT_IN_POW
):
8263 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
8264 x
= CALL_EXPR_ARG (arg
, 0);
8265 exponent
= CALL_EXPR_ARG (arg
, 1);
8271 /* Now perform the optimization. */
8274 tree logfn
= build_call_expr (fndecl
, 1, x
);
8275 return fold_build2 (MULT_EXPR
, type
, exponent
, logfn
);
8283 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8284 NULL_TREE if no simplification can be made. */
8287 fold_builtin_hypot (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8289 tree res
, narg0
, narg1
;
8291 if (!validate_arg (arg0
, REAL_TYPE
)
8292 || !validate_arg (arg1
, REAL_TYPE
))
8295 /* Calculate the result when the argument is a constant. */
8296 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
8299 /* If either argument to hypot has a negate or abs, strip that off.
8300 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8301 narg0
= fold_strip_sign_ops (arg0
);
8302 narg1
= fold_strip_sign_ops (arg1
);
8305 return build_call_expr (fndecl
, 2, narg0
? narg0
: arg0
,
8306 narg1
? narg1
: arg1
);
8309 /* If either argument is zero, hypot is fabs of the other. */
8310 if (real_zerop (arg0
))
8311 return fold_build1 (ABS_EXPR
, type
, arg1
);
8312 else if (real_zerop (arg1
))
8313 return fold_build1 (ABS_EXPR
, type
, arg0
);
8315 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8316 if (flag_unsafe_math_optimizations
8317 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8319 const REAL_VALUE_TYPE sqrt2_trunc
8320 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
8321 return fold_build2 (MULT_EXPR
, type
,
8322 fold_build1 (ABS_EXPR
, type
, arg0
),
8323 build_real (type
, sqrt2_trunc
));
8330 /* Fold a builtin function call to pow, powf, or powl. Return
8331 NULL_TREE if no simplification can be made. */
8333 fold_builtin_pow (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8337 if (!validate_arg (arg0
, REAL_TYPE
)
8338 || !validate_arg (arg1
, REAL_TYPE
))
8341 /* Calculate the result when the argument is a constant. */
8342 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8345 /* Optimize pow(1.0,y) = 1.0. */
8346 if (real_onep (arg0
))
8347 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8349 if (TREE_CODE (arg1
) == REAL_CST
8350 && !TREE_OVERFLOW (arg1
))
8352 REAL_VALUE_TYPE cint
;
8356 c
= TREE_REAL_CST (arg1
);
8358 /* Optimize pow(x,0.0) = 1.0. */
8359 if (REAL_VALUES_EQUAL (c
, dconst0
))
8360 return omit_one_operand (type
, build_real (type
, dconst1
),
8363 /* Optimize pow(x,1.0) = x. */
8364 if (REAL_VALUES_EQUAL (c
, dconst1
))
8367 /* Optimize pow(x,-1.0) = 1.0/x. */
8368 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8369 return fold_build2 (RDIV_EXPR
, type
,
8370 build_real (type
, dconst1
), arg0
);
8372 /* Optimize pow(x,0.5) = sqrt(x). */
8373 if (flag_unsafe_math_optimizations
8374 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8376 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8378 if (sqrtfn
!= NULL_TREE
)
8379 return build_call_expr (sqrtfn
, 1, arg0
);
8382 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8383 if (flag_unsafe_math_optimizations
)
8385 const REAL_VALUE_TYPE dconstroot
8386 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
8388 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8390 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8391 if (cbrtfn
!= NULL_TREE
)
8392 return build_call_expr (cbrtfn
, 1, arg0
);
8396 /* Check for an integer exponent. */
8397 n
= real_to_integer (&c
);
8398 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8399 if (real_identical (&c
, &cint
))
8401 /* Attempt to evaluate pow at compile-time. */
8402 if (TREE_CODE (arg0
) == REAL_CST
8403 && !TREE_OVERFLOW (arg0
))
8408 x
= TREE_REAL_CST (arg0
);
8409 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8410 if (flag_unsafe_math_optimizations
|| !inexact
)
8411 return build_real (type
, x
);
8414 /* Strip sign ops from even integer powers. */
8415 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8417 tree narg0
= fold_strip_sign_ops (arg0
);
8419 return build_call_expr (fndecl
, 2, narg0
, arg1
);
8424 if (flag_unsafe_math_optimizations
)
8426 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8428 /* Optimize pow(expN(x),y) = expN(x*y). */
8429 if (BUILTIN_EXPONENT_P (fcode
))
8431 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8432 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8433 arg
= fold_build2 (MULT_EXPR
, type
, arg
, arg1
);
8434 return build_call_expr (expfn
, 1, arg
);
8437 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8438 if (BUILTIN_SQRT_P (fcode
))
8440 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8441 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8442 build_real (type
, dconsthalf
));
8443 return build_call_expr (fndecl
, 2, narg0
, narg1
);
8446 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8447 if (BUILTIN_CBRT_P (fcode
))
8449 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8450 if (tree_expr_nonnegative_p (arg
))
8452 const REAL_VALUE_TYPE dconstroot
8453 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
8454 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8455 build_real (type
, dconstroot
));
8456 return build_call_expr (fndecl
, 2, arg
, narg1
);
8460 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8461 if (fcode
== BUILT_IN_POW
8462 || fcode
== BUILT_IN_POWF
8463 || fcode
== BUILT_IN_POWL
)
8465 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8466 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8467 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg01
, arg1
);
8468 return build_call_expr (fndecl
, 2, arg00
, narg1
);
8475 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8476 Return NULL_TREE if no simplification can be made. */
8478 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED
,
8479 tree arg0
, tree arg1
, tree type
)
8481 if (!validate_arg (arg0
, REAL_TYPE
)
8482 || !validate_arg (arg1
, INTEGER_TYPE
))
8485 /* Optimize pow(1.0,y) = 1.0. */
8486 if (real_onep (arg0
))
8487 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8489 if (host_integerp (arg1
, 0))
8491 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8493 /* Evaluate powi at compile-time. */
8494 if (TREE_CODE (arg0
) == REAL_CST
8495 && !TREE_OVERFLOW (arg0
))
8498 x
= TREE_REAL_CST (arg0
);
8499 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8500 return build_real (type
, x
);
8503 /* Optimize pow(x,0) = 1.0. */
8505 return omit_one_operand (type
, build_real (type
, dconst1
),
8508 /* Optimize pow(x,1) = x. */
8512 /* Optimize pow(x,-1) = 1.0/x. */
8514 return fold_build2 (RDIV_EXPR
, type
,
8515 build_real (type
, dconst1
), arg0
);
8521 /* A subroutine of fold_builtin to fold the various exponent
8522 functions. Return NULL_TREE if no simplification can be made.
8523 FUNC is the corresponding MPFR exponent function. */
8526 fold_builtin_exponent (tree fndecl
, tree arg
,
8527 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8529 if (validate_arg (arg
, REAL_TYPE
))
8531 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8534 /* Calculate the result when the argument is a constant. */
8535 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8538 /* Optimize expN(logN(x)) = x. */
8539 if (flag_unsafe_math_optimizations
)
8541 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8543 if ((func
== mpfr_exp
8544 && (fcode
== BUILT_IN_LOG
8545 || fcode
== BUILT_IN_LOGF
8546 || fcode
== BUILT_IN_LOGL
))
8547 || (func
== mpfr_exp2
8548 && (fcode
== BUILT_IN_LOG2
8549 || fcode
== BUILT_IN_LOG2F
8550 || fcode
== BUILT_IN_LOG2L
))
8551 || (func
== mpfr_exp10
8552 && (fcode
== BUILT_IN_LOG10
8553 || fcode
== BUILT_IN_LOG10F
8554 || fcode
== BUILT_IN_LOG10L
)))
8555 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
8562 /* Return true if VAR is a VAR_DECL or a component thereof. */
8565 var_decl_component_p (tree var
)
8568 while (handled_component_p (inner
))
8569 inner
= TREE_OPERAND (inner
, 0);
8570 return SSA_VAR_P (inner
);
8573 /* Fold function call to builtin memset. Return
8574 NULL_TREE if no simplification can be made. */
8577 fold_builtin_memset (tree dest
, tree c
, tree len
, tree type
, bool ignore
)
8580 unsigned HOST_WIDE_INT length
, cval
;
8582 if (! validate_arg (dest
, POINTER_TYPE
)
8583 || ! validate_arg (c
, INTEGER_TYPE
)
8584 || ! validate_arg (len
, INTEGER_TYPE
))
8587 if (! host_integerp (len
, 1))
8590 /* If the LEN parameter is zero, return DEST. */
8591 if (integer_zerop (len
))
8592 return omit_one_operand (type
, dest
, c
);
8594 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8599 if (TREE_CODE (var
) != ADDR_EXPR
)
8602 var
= TREE_OPERAND (var
, 0);
8603 if (TREE_THIS_VOLATILE (var
))
8606 if (!INTEGRAL_TYPE_P (TREE_TYPE (var
))
8607 && !POINTER_TYPE_P (TREE_TYPE (var
)))
8610 if (! var_decl_component_p (var
))
8613 length
= tree_low_cst (len
, 1);
8614 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var
))) != length
8615 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8619 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8622 if (integer_zerop (c
))
8626 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8629 cval
= tree_low_cst (c
, 1);
8633 cval
|= (cval
<< 31) << 1;
8636 ret
= build_int_cst_type (TREE_TYPE (var
), cval
);
8637 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, ret
);
8641 return omit_one_operand (type
, dest
, ret
);
8644 /* Fold function call to builtin memset. Return
8645 NULL_TREE if no simplification can be made. */
8648 fold_builtin_bzero (tree dest
, tree size
, bool ignore
)
8650 if (! validate_arg (dest
, POINTER_TYPE
)
8651 || ! validate_arg (size
, INTEGER_TYPE
))
8657 /* New argument list transforming bzero(ptr x, int y) to
8658 memset(ptr x, int 0, size_t y). This is done this way
8659 so that if it isn't expanded inline, we fallback to
8660 calling bzero instead of memset. */
8662 return fold_builtin_memset (dest
, integer_zero_node
,
8663 fold_convert (sizetype
, size
),
8664 void_type_node
, ignore
);
8667 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8668 NULL_TREE if no simplification can be made.
8669 If ENDP is 0, return DEST (like memcpy).
8670 If ENDP is 1, return DEST+LEN (like mempcpy).
8671 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8672 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8676 fold_builtin_memory_op (tree dest
, tree src
, tree len
, tree type
, bool ignore
, int endp
)
8678 tree destvar
, srcvar
, expr
;
8680 if (! validate_arg (dest
, POINTER_TYPE
)
8681 || ! validate_arg (src
, POINTER_TYPE
)
8682 || ! validate_arg (len
, INTEGER_TYPE
))
8685 /* If the LEN parameter is zero, return DEST. */
8686 if (integer_zerop (len
))
8687 return omit_one_operand (type
, dest
, src
);
8689 /* If SRC and DEST are the same (and not volatile), return
8690 DEST{,+LEN,+LEN-1}. */
8691 if (operand_equal_p (src
, dest
, 0))
8695 tree srctype
, desttype
;
8698 int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8699 int dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8701 /* Both DEST and SRC must be pointer types.
8702 ??? This is what old code did. Is the testing for pointer types
8705 If either SRC is readonly or length is 1, we can use memcpy. */
8706 if (dest_align
&& src_align
8707 && (readonly_data_expr (src
)
8708 || (host_integerp (len
, 1)
8709 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
>=
8710 tree_low_cst (len
, 1)))))
8712 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8715 return build_call_expr (fn
, 3, dest
, src
, len
);
8720 if (!host_integerp (len
, 0))
8723 This logic lose for arguments like (type *)malloc (sizeof (type)),
8724 since we strip the casts of up to VOID return value from malloc.
8725 Perhaps we ought to inherit type from non-VOID argument here? */
8728 srctype
= TREE_TYPE (TREE_TYPE (src
));
8729 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8730 if (!srctype
|| !desttype
8731 || !TYPE_SIZE_UNIT (srctype
)
8732 || !TYPE_SIZE_UNIT (desttype
)
8733 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8734 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
8735 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
8736 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8739 if (get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
)
8740 < (int) TYPE_ALIGN (desttype
)
8741 || (get_pointer_alignment (src
, BIGGEST_ALIGNMENT
)
8742 < (int) TYPE_ALIGN (srctype
)))
8746 dest
= builtin_save_expr (dest
);
8748 srcvar
= build_fold_indirect_ref (src
);
8749 if (TREE_THIS_VOLATILE (srcvar
))
8751 if (!tree_int_cst_equal (lang_hooks
.expr_size (srcvar
), len
))
8753 /* With memcpy, it is possible to bypass aliasing rules, so without
8754 this check i. e. execute/20060930-2.c would be misoptimized, because
8755 it use conflicting alias set to hold argument for the memcpy call.
8756 This check is probably unnecesary with -fno-strict-aliasing.
8757 Similarly for destvar. See also PR29286. */
8758 if (!var_decl_component_p (srcvar
)
8759 /* Accept: memcpy (*char_var, "test", 1); that simplify
8761 || is_gimple_min_invariant (srcvar
)
8762 || readonly_data_expr (src
))
8765 destvar
= build_fold_indirect_ref (dest
);
8766 if (TREE_THIS_VOLATILE (destvar
))
8768 if (!tree_int_cst_equal (lang_hooks
.expr_size (destvar
), len
))
8770 if (!var_decl_component_p (destvar
))
8773 if (srctype
== desttype
8774 || (gimple_in_ssa_p (cfun
)
8775 && useless_type_conversion_p (desttype
, srctype
)))
8777 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar
))
8778 || POINTER_TYPE_P (TREE_TYPE (srcvar
)))
8779 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar
))
8780 || POINTER_TYPE_P (TREE_TYPE (destvar
))))
8781 expr
= fold_convert (TREE_TYPE (destvar
), srcvar
);
8783 expr
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (destvar
), srcvar
);
8784 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, expr
);
8790 if (endp
== 0 || endp
== 3)
8791 return omit_one_operand (type
, dest
, expr
);
8797 len
= fold_build2 (MINUS_EXPR
, TREE_TYPE (len
), len
,
8800 dest
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8801 dest
= fold_convert (type
, dest
);
8803 dest
= omit_one_operand (type
, dest
, expr
);
8807 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8808 If LEN is not NULL, it represents the length of the string to be
8809 copied. Return NULL_TREE if no simplification can be made. */
8812 fold_builtin_strcpy (tree fndecl
, tree dest
, tree src
, tree len
)
8816 if (!validate_arg (dest
, POINTER_TYPE
)
8817 || !validate_arg (src
, POINTER_TYPE
))
8820 /* If SRC and DEST are the same (and not volatile), return DEST. */
8821 if (operand_equal_p (src
, dest
, 0))
8822 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8827 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8833 len
= c_strlen (src
, 1);
8834 if (! len
|| TREE_SIDE_EFFECTS (len
))
8838 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
8839 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8840 build_call_expr (fn
, 3, dest
, src
, len
));
8843 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8844 If SLEN is not NULL, it represents the length of the source string.
8845 Return NULL_TREE if no simplification can be made. */
8848 fold_builtin_strncpy (tree fndecl
, tree dest
, tree src
, tree len
, tree slen
)
8852 if (!validate_arg (dest
, POINTER_TYPE
)
8853 || !validate_arg (src
, POINTER_TYPE
)
8854 || !validate_arg (len
, INTEGER_TYPE
))
8857 /* If the LEN parameter is zero, return DEST. */
8858 if (integer_zerop (len
))
8859 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8861 /* We can't compare slen with len as constants below if len is not a
8863 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8867 slen
= c_strlen (src
, 1);
8869 /* Now, we must be passed a constant src ptr parameter. */
8870 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8873 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
8875 /* We do not support simplification of this case, though we do
8876 support it when expanding trees into RTL. */
8877 /* FIXME: generate a call to __builtin_memset. */
8878 if (tree_int_cst_lt (slen
, len
))
8881 /* OK transform into builtin memcpy. */
8882 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8885 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8886 build_call_expr (fn
, 3, dest
, src
, len
));
8889 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8890 arguments to the call, and TYPE is its return type.
8891 Return NULL_TREE if no simplification can be made. */
8894 fold_builtin_memchr (tree arg1
, tree arg2
, tree len
, tree type
)
8896 if (!validate_arg (arg1
, POINTER_TYPE
)
8897 || !validate_arg (arg2
, INTEGER_TYPE
)
8898 || !validate_arg (len
, INTEGER_TYPE
))
8904 if (TREE_CODE (arg2
) != INTEGER_CST
8905 || !host_integerp (len
, 1))
8908 p1
= c_getstr (arg1
);
8909 if (p1
&& compare_tree_int (len
, strlen (p1
) + 1) <= 0)
8915 if (target_char_cast (arg2
, &c
))
8918 r
= memchr (p1
, c
, tree_low_cst (len
, 1));
8921 return build_int_cst (TREE_TYPE (arg1
), 0);
8923 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (arg1
), arg1
,
8925 return fold_convert (type
, tem
);
8931 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8932 Return NULL_TREE if no simplification can be made. */
8935 fold_builtin_memcmp (tree arg1
, tree arg2
, tree len
)
8937 const char *p1
, *p2
;
8939 if (!validate_arg (arg1
, POINTER_TYPE
)
8940 || !validate_arg (arg2
, POINTER_TYPE
)
8941 || !validate_arg (len
, INTEGER_TYPE
))
8944 /* If the LEN parameter is zero, return zero. */
8945 if (integer_zerop (len
))
8946 return omit_two_operands (integer_type_node
, integer_zero_node
,
8949 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8950 if (operand_equal_p (arg1
, arg2
, 0))
8951 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
8953 p1
= c_getstr (arg1
);
8954 p2
= c_getstr (arg2
);
8956 /* If all arguments are constant, and the value of len is not greater
8957 than the lengths of arg1 and arg2, evaluate at compile-time. */
8958 if (host_integerp (len
, 1) && p1
&& p2
8959 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8960 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8962 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
8965 return integer_one_node
;
8967 return integer_minus_one_node
;
8969 return integer_zero_node
;
8972 /* If len parameter is one, return an expression corresponding to
8973 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8974 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8976 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8977 tree cst_uchar_ptr_node
8978 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8980 tree ind1
= fold_convert (integer_type_node
,
8981 build1 (INDIRECT_REF
, cst_uchar_node
,
8982 fold_convert (cst_uchar_ptr_node
,
8984 tree ind2
= fold_convert (integer_type_node
,
8985 build1 (INDIRECT_REF
, cst_uchar_node
,
8986 fold_convert (cst_uchar_ptr_node
,
8988 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8994 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8995 Return NULL_TREE if no simplification can be made. */
8998 fold_builtin_strcmp (tree arg1
, tree arg2
)
9000 const char *p1
, *p2
;
9002 if (!validate_arg (arg1
, POINTER_TYPE
)
9003 || !validate_arg (arg2
, POINTER_TYPE
))
9006 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9007 if (operand_equal_p (arg1
, arg2
, 0))
9008 return integer_zero_node
;
9010 p1
= c_getstr (arg1
);
9011 p2
= c_getstr (arg2
);
9015 const int i
= strcmp (p1
, p2
);
9017 return integer_minus_one_node
;
9019 return integer_one_node
;
9021 return integer_zero_node
;
9024 /* If the second arg is "", return *(const unsigned char*)arg1. */
9025 if (p2
&& *p2
== '\0')
9027 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9028 tree cst_uchar_ptr_node
9029 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9031 return fold_convert (integer_type_node
,
9032 build1 (INDIRECT_REF
, cst_uchar_node
,
9033 fold_convert (cst_uchar_ptr_node
,
9037 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9038 if (p1
&& *p1
== '\0')
9040 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9041 tree cst_uchar_ptr_node
9042 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9044 tree temp
= fold_convert (integer_type_node
,
9045 build1 (INDIRECT_REF
, cst_uchar_node
,
9046 fold_convert (cst_uchar_ptr_node
,
9048 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
9054 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9055 Return NULL_TREE if no simplification can be made. */
9058 fold_builtin_strncmp (tree arg1
, tree arg2
, tree len
)
9060 const char *p1
, *p2
;
9062 if (!validate_arg (arg1
, POINTER_TYPE
)
9063 || !validate_arg (arg2
, POINTER_TYPE
)
9064 || !validate_arg (len
, INTEGER_TYPE
))
9067 /* If the LEN parameter is zero, return zero. */
9068 if (integer_zerop (len
))
9069 return omit_two_operands (integer_type_node
, integer_zero_node
,
9072 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9073 if (operand_equal_p (arg1
, arg2
, 0))
9074 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
9076 p1
= c_getstr (arg1
);
9077 p2
= c_getstr (arg2
);
9079 if (host_integerp (len
, 1) && p1
&& p2
)
9081 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
9083 return integer_one_node
;
9085 return integer_minus_one_node
;
9087 return integer_zero_node
;
9090 /* If the second arg is "", and the length is greater than zero,
9091 return *(const unsigned char*)arg1. */
9092 if (p2
&& *p2
== '\0'
9093 && TREE_CODE (len
) == INTEGER_CST
9094 && tree_int_cst_sgn (len
) == 1)
9096 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9097 tree cst_uchar_ptr_node
9098 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9100 return fold_convert (integer_type_node
,
9101 build1 (INDIRECT_REF
, cst_uchar_node
,
9102 fold_convert (cst_uchar_ptr_node
,
9106 /* If the first arg is "", and the length is greater than zero,
9107 return -*(const unsigned char*)arg2. */
9108 if (p1
&& *p1
== '\0'
9109 && TREE_CODE (len
) == INTEGER_CST
9110 && tree_int_cst_sgn (len
) == 1)
9112 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9113 tree cst_uchar_ptr_node
9114 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9116 tree temp
= fold_convert (integer_type_node
,
9117 build1 (INDIRECT_REF
, cst_uchar_node
,
9118 fold_convert (cst_uchar_ptr_node
,
9120 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
9123 /* If len parameter is one, return an expression corresponding to
9124 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
9125 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
9127 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
9128 tree cst_uchar_ptr_node
9129 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
9131 tree ind1
= fold_convert (integer_type_node
,
9132 build1 (INDIRECT_REF
, cst_uchar_node
,
9133 fold_convert (cst_uchar_ptr_node
,
9135 tree ind2
= fold_convert (integer_type_node
,
9136 build1 (INDIRECT_REF
, cst_uchar_node
,
9137 fold_convert (cst_uchar_ptr_node
,
9139 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
9145 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9146 ARG. Return NULL_TREE if no simplification can be made. */
9149 fold_builtin_signbit (tree arg
, tree type
)
9153 if (!validate_arg (arg
, REAL_TYPE
))
9156 /* If ARG is a compile-time constant, determine the result. */
9157 if (TREE_CODE (arg
) == REAL_CST
9158 && !TREE_OVERFLOW (arg
))
9162 c
= TREE_REAL_CST (arg
);
9163 temp
= REAL_VALUE_NEGATIVE (c
) ? integer_one_node
: integer_zero_node
;
9164 return fold_convert (type
, temp
);
9167 /* If ARG is non-negative, the result is always zero. */
9168 if (tree_expr_nonnegative_p (arg
))
9169 return omit_one_operand (type
, integer_zero_node
, arg
);
9171 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9172 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg
))))
9173 return fold_build2 (LT_EXPR
, type
, arg
,
9174 build_real (TREE_TYPE (arg
), dconst0
));
9179 /* Fold function call to builtin copysign, copysignf or copysignl with
9180 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9184 fold_builtin_copysign (tree fndecl
, tree arg1
, tree arg2
, tree type
)
9188 if (!validate_arg (arg1
, REAL_TYPE
)
9189 || !validate_arg (arg2
, REAL_TYPE
))
9192 /* copysign(X,X) is X. */
9193 if (operand_equal_p (arg1
, arg2
, 0))
9194 return fold_convert (type
, arg1
);
9196 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9197 if (TREE_CODE (arg1
) == REAL_CST
9198 && TREE_CODE (arg2
) == REAL_CST
9199 && !TREE_OVERFLOW (arg1
)
9200 && !TREE_OVERFLOW (arg2
))
9202 REAL_VALUE_TYPE c1
, c2
;
9204 c1
= TREE_REAL_CST (arg1
);
9205 c2
= TREE_REAL_CST (arg2
);
9206 /* c1.sign := c2.sign. */
9207 real_copysign (&c1
, &c2
);
9208 return build_real (type
, c1
);
9211 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9212 Remember to evaluate Y for side-effects. */
9213 if (tree_expr_nonnegative_p (arg2
))
9214 return omit_one_operand (type
,
9215 fold_build1 (ABS_EXPR
, type
, arg1
),
9218 /* Strip sign changing operations for the first argument. */
9219 tem
= fold_strip_sign_ops (arg1
);
9221 return build_call_expr (fndecl
, 2, tem
, arg2
);
9226 /* Fold a call to builtin isascii with argument ARG. */
9229 fold_builtin_isascii (tree arg
)
9231 if (!validate_arg (arg
, INTEGER_TYPE
))
9235 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9236 arg
= build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9237 build_int_cst (NULL_TREE
,
9238 ~ (unsigned HOST_WIDE_INT
) 0x7f));
9239 return fold_build2 (EQ_EXPR
, integer_type_node
,
9240 arg
, integer_zero_node
);
9244 /* Fold a call to builtin toascii with argument ARG. */
9247 fold_builtin_toascii (tree arg
)
9249 if (!validate_arg (arg
, INTEGER_TYPE
))
9252 /* Transform toascii(c) -> (c & 0x7f). */
9253 return fold_build2 (BIT_AND_EXPR
, integer_type_node
, arg
,
9254 build_int_cst (NULL_TREE
, 0x7f));
9257 /* Fold a call to builtin isdigit with argument ARG. */
9260 fold_builtin_isdigit (tree arg
)
9262 if (!validate_arg (arg
, INTEGER_TYPE
))
9266 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9267 /* According to the C standard, isdigit is unaffected by locale.
9268 However, it definitely is affected by the target character set. */
9269 unsigned HOST_WIDE_INT target_digit0
9270 = lang_hooks
.to_target_charset ('0');
9272 if (target_digit0
== 0)
9275 arg
= fold_convert (unsigned_type_node
, arg
);
9276 arg
= build2 (MINUS_EXPR
, unsigned_type_node
, arg
,
9277 build_int_cst (unsigned_type_node
, target_digit0
));
9278 return fold_build2 (LE_EXPR
, integer_type_node
, arg
,
9279 build_int_cst (unsigned_type_node
, 9));
9283 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9286 fold_builtin_fabs (tree arg
, tree type
)
9288 if (!validate_arg (arg
, REAL_TYPE
))
9291 arg
= fold_convert (type
, arg
);
9292 if (TREE_CODE (arg
) == REAL_CST
)
9293 return fold_abs_const (arg
, type
);
9294 return fold_build1 (ABS_EXPR
, type
, arg
);
9297 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9300 fold_builtin_abs (tree arg
, tree type
)
9302 if (!validate_arg (arg
, INTEGER_TYPE
))
9305 arg
= fold_convert (type
, arg
);
9306 if (TREE_CODE (arg
) == INTEGER_CST
)
9307 return fold_abs_const (arg
, type
);
9308 return fold_build1 (ABS_EXPR
, type
, arg
);
9311 /* Fold a call to builtin fmin or fmax. */
9314 fold_builtin_fmin_fmax (tree arg0
, tree arg1
, tree type
, bool max
)
9316 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9318 /* Calculate the result when the argument is a constant. */
9319 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9324 /* If either argument is NaN, return the other one. Avoid the
9325 transformation if we get (and honor) a signalling NaN. Using
9326 omit_one_operand() ensures we create a non-lvalue. */
9327 if (TREE_CODE (arg0
) == REAL_CST
9328 && real_isnan (&TREE_REAL_CST (arg0
))
9329 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9330 || ! TREE_REAL_CST (arg0
).signalling
))
9331 return omit_one_operand (type
, arg1
, arg0
);
9332 if (TREE_CODE (arg1
) == REAL_CST
9333 && real_isnan (&TREE_REAL_CST (arg1
))
9334 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9335 || ! TREE_REAL_CST (arg1
).signalling
))
9336 return omit_one_operand (type
, arg0
, arg1
);
9338 /* Transform fmin/fmax(x,x) -> x. */
9339 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9340 return omit_one_operand (type
, arg0
, arg1
);
9342 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9343 functions to return the numeric arg if the other one is NaN.
9344 These tree codes don't honor that, so only transform if
9345 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9346 handled, so we don't have to worry about it either. */
9347 if (flag_finite_math_only
)
9348 return fold_build2 ((max
? MAX_EXPR
: MIN_EXPR
), type
,
9349 fold_convert (type
, arg0
),
9350 fold_convert (type
, arg1
));
9355 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9358 fold_builtin_carg (tree arg
, tree type
)
9360 if (validate_arg (arg
, COMPLEX_TYPE
))
9362 tree atan2_fn
= mathfn_built_in (type
, BUILT_IN_ATAN2
);
9366 tree new_arg
= builtin_save_expr (arg
);
9367 tree r_arg
= fold_build1 (REALPART_EXPR
, type
, new_arg
);
9368 tree i_arg
= fold_build1 (IMAGPART_EXPR
, type
, new_arg
);
9369 return build_call_expr (atan2_fn
, 2, i_arg
, r_arg
);
9376 /* Fold a call to builtin logb/ilogb. */
9379 fold_builtin_logb (tree arg
, tree rettype
)
9381 if (! validate_arg (arg
, REAL_TYPE
))
9386 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9388 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9394 /* If arg is Inf or NaN and we're logb, return it. */
9395 if (TREE_CODE (rettype
) == REAL_TYPE
)
9396 return fold_convert (rettype
, arg
);
9397 /* Fall through... */
9399 /* Zero may set errno and/or raise an exception for logb, also
9400 for ilogb we don't know FP_ILOGB0. */
9403 /* For normal numbers, proceed iff radix == 2. In GCC,
9404 normalized significands are in the range [0.5, 1.0). We
9405 want the exponent as if they were [1.0, 2.0) so get the
9406 exponent and subtract 1. */
9407 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9408 return fold_convert (rettype
, build_int_cst (NULL_TREE
,
9409 REAL_EXP (value
)-1));
9417 /* Fold a call to builtin significand, if radix == 2. */
9420 fold_builtin_significand (tree arg
, tree rettype
)
9422 if (! validate_arg (arg
, REAL_TYPE
))
9427 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9429 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9436 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9437 return fold_convert (rettype
, arg
);
9439 /* For normal numbers, proceed iff radix == 2. */
9440 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9442 REAL_VALUE_TYPE result
= *value
;
9443 /* In GCC, normalized significands are in the range [0.5,
9444 1.0). We want them to be [1.0, 2.0) so set the
9446 SET_REAL_EXP (&result
, 1);
9447 return build_real (rettype
, result
);
9456 /* Fold a call to builtin frexp, we can assume the base is 2. */
9459 fold_builtin_frexp (tree arg0
, tree arg1
, tree rettype
)
9461 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9466 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9469 arg1
= build_fold_indirect_ref (arg1
);
9471 /* Proceed if a valid pointer type was passed in. */
9472 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9474 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9480 /* For +-0, return (*exp = 0, +-0). */
9481 exp
= integer_zero_node
;
9486 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9487 return omit_one_operand (rettype
, arg0
, arg1
);
9490 /* Since the frexp function always expects base 2, and in
9491 GCC normalized significands are already in the range
9492 [0.5, 1.0), we have exactly what frexp wants. */
9493 REAL_VALUE_TYPE frac_rvt
= *value
;
9494 SET_REAL_EXP (&frac_rvt
, 0);
9495 frac
= build_real (rettype
, frac_rvt
);
9496 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9503 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9504 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
, exp
);
9505 TREE_SIDE_EFFECTS (arg1
) = 1;
9506 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
, frac
);
9512 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9513 then we can assume the base is two. If it's false, then we have to
9514 check the mode of the TYPE parameter in certain cases. */
9517 fold_builtin_load_exponent (tree arg0
, tree arg1
, tree type
, bool ldexp
)
9519 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9524 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9525 if (real_zerop (arg0
) || integer_zerop (arg1
)
9526 || (TREE_CODE (arg0
) == REAL_CST
9527 && !real_isfinite (&TREE_REAL_CST (arg0
))))
9528 return omit_one_operand (type
, arg0
, arg1
);
9530 /* If both arguments are constant, then try to evaluate it. */
9531 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9532 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9533 && host_integerp (arg1
, 0))
9535 /* Bound the maximum adjustment to twice the range of the
9536 mode's valid exponents. Use abs to ensure the range is
9537 positive as a sanity check. */
9538 const long max_exp_adj
= 2 *
9539 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9540 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9542 /* Get the user-requested adjustment. */
9543 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9545 /* The requested adjustment must be inside this range. This
9546 is a preliminary cap to avoid things like overflow, we
9547 may still fail to compute the result for other reasons. */
9548 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9550 REAL_VALUE_TYPE initial_result
;
9552 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9554 /* Ensure we didn't overflow. */
9555 if (! real_isinf (&initial_result
))
9557 const REAL_VALUE_TYPE trunc_result
9558 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9560 /* Only proceed if the target mode can hold the
9562 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9563 return build_real (type
, trunc_result
);
9572 /* Fold a call to builtin modf. */
9575 fold_builtin_modf (tree arg0
, tree arg1
, tree rettype
)
9577 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9582 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9585 arg1
= build_fold_indirect_ref (arg1
);
9587 /* Proceed if a valid pointer type was passed in. */
9588 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9590 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9591 REAL_VALUE_TYPE trunc
, frac
;
9597 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9598 trunc
= frac
= *value
;
9601 /* For +-Inf, return (*arg1 = arg0, +-0). */
9603 frac
.sign
= value
->sign
;
9607 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9608 real_trunc (&trunc
, VOIDmode
, value
);
9609 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9610 /* If the original number was negative and already
9611 integral, then the fractional part is -0.0. */
9612 if (value
->sign
&& frac
.cl
== rvc_zero
)
9613 frac
.sign
= value
->sign
;
9617 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9618 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
,
9619 build_real (rettype
, trunc
));
9620 TREE_SIDE_EFFECTS (arg1
) = 1;
9621 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
,
9622 build_real (rettype
, frac
));
9628 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9629 ARG is the argument for the call. */
9632 fold_builtin_classify (tree fndecl
, tree arg
, int builtin_index
)
9634 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9637 if (!validate_arg (arg
, REAL_TYPE
))
9639 error ("non-floating-point argument to function %qs",
9640 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9641 return error_mark_node
;
9644 switch (builtin_index
)
9646 case BUILT_IN_ISINF
:
9647 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9648 return omit_one_operand (type
, integer_zero_node
, arg
);
9650 if (TREE_CODE (arg
) == REAL_CST
)
9652 r
= TREE_REAL_CST (arg
);
9653 if (real_isinf (&r
))
9654 return real_compare (GT_EXPR
, &r
, &dconst0
)
9655 ? integer_one_node
: integer_minus_one_node
;
9657 return integer_zero_node
;
9662 case BUILT_IN_ISFINITE
:
9663 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9664 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9665 return omit_one_operand (type
, integer_one_node
, arg
);
9667 if (TREE_CODE (arg
) == REAL_CST
)
9669 r
= TREE_REAL_CST (arg
);
9670 return real_isfinite (&r
) ? integer_one_node
: integer_zero_node
;
9675 case BUILT_IN_ISNAN
:
9676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9677 return omit_one_operand (type
, integer_zero_node
, arg
);
9679 if (TREE_CODE (arg
) == REAL_CST
)
9681 r
= TREE_REAL_CST (arg
);
9682 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9685 arg
= builtin_save_expr (arg
);
9686 return fold_build2 (UNORDERED_EXPR
, type
, arg
, arg
);
9693 /* Fold a call to an unordered comparison function such as
9694 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9695 being called and ARG0 and ARG1 are the arguments for the call.
9696 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9697 the opposite of the desired result. UNORDERED_CODE is used
9698 for modes that can hold NaNs and ORDERED_CODE is used for
9702 fold_builtin_unordered_cmp (tree fndecl
, tree arg0
, tree arg1
,
9703 enum tree_code unordered_code
,
9704 enum tree_code ordered_code
)
9706 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9707 enum tree_code code
;
9709 enum tree_code code0
, code1
;
9710 tree cmp_type
= NULL_TREE
;
9712 type0
= TREE_TYPE (arg0
);
9713 type1
= TREE_TYPE (arg1
);
9715 code0
= TREE_CODE (type0
);
9716 code1
= TREE_CODE (type1
);
9718 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9719 /* Choose the wider of two real types. */
9720 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9722 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9724 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9728 error ("non-floating-point argument to function %qs",
9729 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9730 return error_mark_node
;
9733 arg0
= fold_convert (cmp_type
, arg0
);
9734 arg1
= fold_convert (cmp_type
, arg1
);
9736 if (unordered_code
== UNORDERED_EXPR
)
9738 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9739 return omit_two_operands (type
, integer_zero_node
, arg0
, arg1
);
9740 return fold_build2 (UNORDERED_EXPR
, type
, arg0
, arg1
);
9743 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
9745 return fold_build1 (TRUTH_NOT_EXPR
, type
,
9746 fold_build2 (code
, type
, arg0
, arg1
));
9749 /* Fold a call to built-in function FNDECL with 0 arguments.
9750 IGNORE is true if the result of the function call is ignored. This
9751 function returns NULL_TREE if no simplification was possible. */
9754 fold_builtin_0 (tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
9756 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9757 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9760 CASE_FLT_FN (BUILT_IN_INF
):
9761 case BUILT_IN_INFD32
:
9762 case BUILT_IN_INFD64
:
9763 case BUILT_IN_INFD128
:
9764 return fold_builtin_inf (type
, true);
9766 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9767 return fold_builtin_inf (type
, false);
9769 case BUILT_IN_CLASSIFY_TYPE
:
9770 return fold_builtin_classify_type (NULL_TREE
);
9778 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9779 IGNORE is true if the result of the function call is ignored. This
9780 function returns NULL_TREE if no simplification was possible. */
9783 fold_builtin_1 (tree fndecl
, tree arg0
, bool ignore
)
9785 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9786 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9790 case BUILT_IN_CONSTANT_P
:
9792 tree val
= fold_builtin_constant_p (arg0
);
9794 /* Gimplification will pull the CALL_EXPR for the builtin out of
9795 an if condition. When not optimizing, we'll not CSE it back.
9796 To avoid link error types of regressions, return false now. */
9797 if (!val
&& !optimize
)
9798 val
= integer_zero_node
;
9803 case BUILT_IN_CLASSIFY_TYPE
:
9804 return fold_builtin_classify_type (arg0
);
9806 case BUILT_IN_STRLEN
:
9807 return fold_builtin_strlen (arg0
);
9809 CASE_FLT_FN (BUILT_IN_FABS
):
9810 return fold_builtin_fabs (arg0
, type
);
9814 case BUILT_IN_LLABS
:
9815 case BUILT_IN_IMAXABS
:
9816 return fold_builtin_abs (arg0
, type
);
9818 CASE_FLT_FN (BUILT_IN_CONJ
):
9819 if (validate_arg (arg0
, COMPLEX_TYPE
))
9820 return fold_build1 (CONJ_EXPR
, type
, arg0
);
9823 CASE_FLT_FN (BUILT_IN_CREAL
):
9824 if (validate_arg (arg0
, COMPLEX_TYPE
))
9825 return non_lvalue (fold_build1 (REALPART_EXPR
, type
, arg0
));;
9828 CASE_FLT_FN (BUILT_IN_CIMAG
):
9829 if (validate_arg (arg0
, COMPLEX_TYPE
))
9830 return non_lvalue (fold_build1 (IMAGPART_EXPR
, type
, arg0
));
9833 CASE_FLT_FN (BUILT_IN_CCOS
):
9834 CASE_FLT_FN (BUILT_IN_CCOSH
):
9835 /* These functions are "even", i.e. f(x) == f(-x). */
9836 if (validate_arg (arg0
, COMPLEX_TYPE
))
9838 tree narg
= fold_strip_sign_ops (arg0
);
9840 return build_call_expr (fndecl
, 1, narg
);
9844 CASE_FLT_FN (BUILT_IN_CABS
):
9845 return fold_builtin_cabs (arg0
, type
, fndecl
);
9847 CASE_FLT_FN (BUILT_IN_CARG
):
9848 return fold_builtin_carg (arg0
, type
);
9850 CASE_FLT_FN (BUILT_IN_SQRT
):
9851 return fold_builtin_sqrt (arg0
, type
);
9853 CASE_FLT_FN (BUILT_IN_CBRT
):
9854 return fold_builtin_cbrt (arg0
, type
);
9856 CASE_FLT_FN (BUILT_IN_ASIN
):
9857 if (validate_arg (arg0
, REAL_TYPE
))
9858 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9859 &dconstm1
, &dconst1
, true);
9862 CASE_FLT_FN (BUILT_IN_ACOS
):
9863 if (validate_arg (arg0
, REAL_TYPE
))
9864 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9865 &dconstm1
, &dconst1
, true);
9868 CASE_FLT_FN (BUILT_IN_ATAN
):
9869 if (validate_arg (arg0
, REAL_TYPE
))
9870 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9873 CASE_FLT_FN (BUILT_IN_ASINH
):
9874 if (validate_arg (arg0
, REAL_TYPE
))
9875 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9878 CASE_FLT_FN (BUILT_IN_ACOSH
):
9879 if (validate_arg (arg0
, REAL_TYPE
))
9880 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9881 &dconst1
, NULL
, true);
9884 CASE_FLT_FN (BUILT_IN_ATANH
):
9885 if (validate_arg (arg0
, REAL_TYPE
))
9886 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9887 &dconstm1
, &dconst1
, false);
9890 CASE_FLT_FN (BUILT_IN_SIN
):
9891 if (validate_arg (arg0
, REAL_TYPE
))
9892 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9895 CASE_FLT_FN (BUILT_IN_COS
):
9896 return fold_builtin_cos (arg0
, type
, fndecl
);
9899 CASE_FLT_FN (BUILT_IN_TAN
):
9900 return fold_builtin_tan (arg0
, type
);
9902 CASE_FLT_FN (BUILT_IN_CEXP
):
9903 return fold_builtin_cexp (arg0
, type
);
9905 CASE_FLT_FN (BUILT_IN_CEXPI
):
9906 if (validate_arg (arg0
, REAL_TYPE
))
9907 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
9910 CASE_FLT_FN (BUILT_IN_SINH
):
9911 if (validate_arg (arg0
, REAL_TYPE
))
9912 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
9915 CASE_FLT_FN (BUILT_IN_COSH
):
9916 return fold_builtin_cosh (arg0
, type
, fndecl
);
9918 CASE_FLT_FN (BUILT_IN_TANH
):
9919 if (validate_arg (arg0
, REAL_TYPE
))
9920 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
9923 CASE_FLT_FN (BUILT_IN_ERF
):
9924 if (validate_arg (arg0
, REAL_TYPE
))
9925 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
9928 CASE_FLT_FN (BUILT_IN_ERFC
):
9929 if (validate_arg (arg0
, REAL_TYPE
))
9930 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
9933 CASE_FLT_FN (BUILT_IN_TGAMMA
):
9934 if (validate_arg (arg0
, REAL_TYPE
))
9935 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
9938 CASE_FLT_FN (BUILT_IN_EXP
):
9939 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp
);
9941 CASE_FLT_FN (BUILT_IN_EXP2
):
9942 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp2
);
9944 CASE_FLT_FN (BUILT_IN_EXP10
):
9945 CASE_FLT_FN (BUILT_IN_POW10
):
9946 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp10
);
9948 CASE_FLT_FN (BUILT_IN_EXPM1
):
9949 if (validate_arg (arg0
, REAL_TYPE
))
9950 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
9953 CASE_FLT_FN (BUILT_IN_LOG
):
9954 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log
);
9956 CASE_FLT_FN (BUILT_IN_LOG2
):
9957 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log2
);
9959 CASE_FLT_FN (BUILT_IN_LOG10
):
9960 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log10
);
9962 CASE_FLT_FN (BUILT_IN_LOG1P
):
9963 if (validate_arg (arg0
, REAL_TYPE
))
9964 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
9965 &dconstm1
, NULL
, false);
9968 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
9969 CASE_FLT_FN (BUILT_IN_J0
):
9970 if (validate_arg (arg0
, REAL_TYPE
))
9971 return do_mpfr_arg1 (arg0
, type
, mpfr_j0
,
9975 CASE_FLT_FN (BUILT_IN_J1
):
9976 if (validate_arg (arg0
, REAL_TYPE
))
9977 return do_mpfr_arg1 (arg0
, type
, mpfr_j1
,
9981 CASE_FLT_FN (BUILT_IN_Y0
):
9982 if (validate_arg (arg0
, REAL_TYPE
))
9983 return do_mpfr_arg1 (arg0
, type
, mpfr_y0
,
9984 &dconst0
, NULL
, false);
9987 CASE_FLT_FN (BUILT_IN_Y1
):
9988 if (validate_arg (arg0
, REAL_TYPE
))
9989 return do_mpfr_arg1 (arg0
, type
, mpfr_y1
,
9990 &dconst0
, NULL
, false);
9994 CASE_FLT_FN (BUILT_IN_NAN
):
9995 case BUILT_IN_NAND32
:
9996 case BUILT_IN_NAND64
:
9997 case BUILT_IN_NAND128
:
9998 return fold_builtin_nan (arg0
, type
, true);
10000 CASE_FLT_FN (BUILT_IN_NANS
):
10001 return fold_builtin_nan (arg0
, type
, false);
10003 CASE_FLT_FN (BUILT_IN_FLOOR
):
10004 return fold_builtin_floor (fndecl
, arg0
);
10006 CASE_FLT_FN (BUILT_IN_CEIL
):
10007 return fold_builtin_ceil (fndecl
, arg0
);
10009 CASE_FLT_FN (BUILT_IN_TRUNC
):
10010 return fold_builtin_trunc (fndecl
, arg0
);
10012 CASE_FLT_FN (BUILT_IN_ROUND
):
10013 return fold_builtin_round (fndecl
, arg0
);
10015 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
10016 CASE_FLT_FN (BUILT_IN_RINT
):
10017 return fold_trunc_transparent_mathfn (fndecl
, arg0
);
10019 CASE_FLT_FN (BUILT_IN_LCEIL
):
10020 CASE_FLT_FN (BUILT_IN_LLCEIL
):
10021 CASE_FLT_FN (BUILT_IN_LFLOOR
):
10022 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
10023 CASE_FLT_FN (BUILT_IN_LROUND
):
10024 CASE_FLT_FN (BUILT_IN_LLROUND
):
10025 return fold_builtin_int_roundingfn (fndecl
, arg0
);
10027 CASE_FLT_FN (BUILT_IN_LRINT
):
10028 CASE_FLT_FN (BUILT_IN_LLRINT
):
10029 return fold_fixed_mathfn (fndecl
, arg0
);
10031 case BUILT_IN_BSWAP32
:
10032 case BUILT_IN_BSWAP64
:
10033 return fold_builtin_bswap (fndecl
, arg0
);
10035 CASE_INT_FN (BUILT_IN_FFS
):
10036 CASE_INT_FN (BUILT_IN_CLZ
):
10037 CASE_INT_FN (BUILT_IN_CTZ
):
10038 CASE_INT_FN (BUILT_IN_POPCOUNT
):
10039 CASE_INT_FN (BUILT_IN_PARITY
):
10040 return fold_builtin_bitop (fndecl
, arg0
);
10042 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
10043 return fold_builtin_signbit (arg0
, type
);
10045 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
10046 return fold_builtin_significand (arg0
, type
);
10048 CASE_FLT_FN (BUILT_IN_ILOGB
):
10049 CASE_FLT_FN (BUILT_IN_LOGB
):
10050 return fold_builtin_logb (arg0
, type
);
10052 case BUILT_IN_ISASCII
:
10053 return fold_builtin_isascii (arg0
);
10055 case BUILT_IN_TOASCII
:
10056 return fold_builtin_toascii (arg0
);
10058 case BUILT_IN_ISDIGIT
:
10059 return fold_builtin_isdigit (arg0
);
10061 CASE_FLT_FN (BUILT_IN_FINITE
):
10062 case BUILT_IN_FINITED32
:
10063 case BUILT_IN_FINITED64
:
10064 case BUILT_IN_FINITED128
:
10065 case BUILT_IN_ISFINITE
:
10066 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISFINITE
);
10068 CASE_FLT_FN (BUILT_IN_ISINF
):
10069 case BUILT_IN_ISINFD32
:
10070 case BUILT_IN_ISINFD64
:
10071 case BUILT_IN_ISINFD128
:
10072 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF
);
10074 CASE_FLT_FN (BUILT_IN_ISNAN
):
10075 case BUILT_IN_ISNAND32
:
10076 case BUILT_IN_ISNAND64
:
10077 case BUILT_IN_ISNAND128
:
10078 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISNAN
);
10080 case BUILT_IN_ISNORMAL
:
10081 if (!validate_arg (arg0
, REAL_TYPE
))
10083 error ("non-floating-point argument to function %qs",
10084 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
10085 return error_mark_node
;
10089 case BUILT_IN_PRINTF
:
10090 case BUILT_IN_PRINTF_UNLOCKED
:
10091 case BUILT_IN_VPRINTF
:
10092 return fold_builtin_printf (fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
10102 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10103 IGNORE is true if the result of the function call is ignored. This
10104 function returns NULL_TREE if no simplification was possible. */
10107 fold_builtin_2 (tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
10109 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10110 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10114 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10115 CASE_FLT_FN (BUILT_IN_JN
):
10116 if (validate_arg (arg0
, INTEGER_TYPE
)
10117 && validate_arg (arg1
, REAL_TYPE
))
10118 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_jn
, NULL
, 0);
10121 CASE_FLT_FN (BUILT_IN_YN
):
10122 if (validate_arg (arg0
, INTEGER_TYPE
)
10123 && validate_arg (arg1
, REAL_TYPE
))
10124 return do_mpfr_bessel_n (arg0
, arg1
, type
, mpfr_yn
,
10128 CASE_FLT_FN (BUILT_IN_DREM
):
10129 CASE_FLT_FN (BUILT_IN_REMAINDER
):
10130 if (validate_arg (arg0
, REAL_TYPE
)
10131 && validate_arg(arg1
, REAL_TYPE
))
10132 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_remainder
);
10135 CASE_FLT_FN_REENT (BUILT_IN_GAMMA
): /* GAMMA_R */
10136 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA
): /* LGAMMA_R */
10137 if (validate_arg (arg0
, REAL_TYPE
)
10138 && validate_arg(arg1
, POINTER_TYPE
))
10139 return do_mpfr_lgamma_r (arg0
, arg1
, type
);
10143 CASE_FLT_FN (BUILT_IN_ATAN2
):
10144 if (validate_arg (arg0
, REAL_TYPE
)
10145 && validate_arg(arg1
, REAL_TYPE
))
10146 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
10149 CASE_FLT_FN (BUILT_IN_FDIM
):
10150 if (validate_arg (arg0
, REAL_TYPE
)
10151 && validate_arg(arg1
, REAL_TYPE
))
10152 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
10155 CASE_FLT_FN (BUILT_IN_HYPOT
):
10156 return fold_builtin_hypot (fndecl
, arg0
, arg1
, type
);
10158 CASE_FLT_FN (BUILT_IN_LDEXP
):
10159 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/true);
10160 CASE_FLT_FN (BUILT_IN_SCALBN
):
10161 CASE_FLT_FN (BUILT_IN_SCALBLN
):
10162 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/false);
10164 CASE_FLT_FN (BUILT_IN_FREXP
):
10165 return fold_builtin_frexp (arg0
, arg1
, type
);
10167 CASE_FLT_FN (BUILT_IN_MODF
):
10168 return fold_builtin_modf (arg0
, arg1
, type
);
10170 case BUILT_IN_BZERO
:
10171 return fold_builtin_bzero (arg0
, arg1
, ignore
);
10173 case BUILT_IN_FPUTS
:
10174 return fold_builtin_fputs (arg0
, arg1
, ignore
, false, NULL_TREE
);
10176 case BUILT_IN_FPUTS_UNLOCKED
:
10177 return fold_builtin_fputs (arg0
, arg1
, ignore
, true, NULL_TREE
);
10179 case BUILT_IN_STRSTR
:
10180 return fold_builtin_strstr (arg0
, arg1
, type
);
10182 case BUILT_IN_STRCAT
:
10183 return fold_builtin_strcat (arg0
, arg1
);
10185 case BUILT_IN_STRSPN
:
10186 return fold_builtin_strspn (arg0
, arg1
);
10188 case BUILT_IN_STRCSPN
:
10189 return fold_builtin_strcspn (arg0
, arg1
);
10191 case BUILT_IN_STRCHR
:
10192 case BUILT_IN_INDEX
:
10193 return fold_builtin_strchr (arg0
, arg1
, type
);
10195 case BUILT_IN_STRRCHR
:
10196 case BUILT_IN_RINDEX
:
10197 return fold_builtin_strrchr (arg0
, arg1
, type
);
10199 case BUILT_IN_STRCPY
:
10200 return fold_builtin_strcpy (fndecl
, arg0
, arg1
, NULL_TREE
);
10202 case BUILT_IN_STRCMP
:
10203 return fold_builtin_strcmp (arg0
, arg1
);
10205 case BUILT_IN_STRPBRK
:
10206 return fold_builtin_strpbrk (arg0
, arg1
, type
);
10208 case BUILT_IN_EXPECT
:
10209 return fold_builtin_expect (arg0
, arg1
);
10211 CASE_FLT_FN (BUILT_IN_POW
):
10212 return fold_builtin_pow (fndecl
, arg0
, arg1
, type
);
10214 CASE_FLT_FN (BUILT_IN_POWI
):
10215 return fold_builtin_powi (fndecl
, arg0
, arg1
, type
);
10217 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
10218 return fold_builtin_copysign (fndecl
, arg0
, arg1
, type
);
10220 CASE_FLT_FN (BUILT_IN_FMIN
):
10221 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/false);
10223 CASE_FLT_FN (BUILT_IN_FMAX
):
10224 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/true);
10226 case BUILT_IN_ISGREATER
:
10227 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
10228 case BUILT_IN_ISGREATEREQUAL
:
10229 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
10230 case BUILT_IN_ISLESS
:
10231 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
10232 case BUILT_IN_ISLESSEQUAL
:
10233 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
10234 case BUILT_IN_ISLESSGREATER
:
10235 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
10236 case BUILT_IN_ISUNORDERED
:
10237 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNORDERED_EXPR
,
10240 /* We do the folding for va_start in the expander. */
10241 case BUILT_IN_VA_START
:
10244 case BUILT_IN_SPRINTF
:
10245 return fold_builtin_sprintf (arg0
, arg1
, NULL_TREE
, ignore
);
10247 case BUILT_IN_OBJECT_SIZE
:
10248 return fold_builtin_object_size (arg0
, arg1
);
10250 case BUILT_IN_PRINTF
:
10251 case BUILT_IN_PRINTF_UNLOCKED
:
10252 case BUILT_IN_VPRINTF
:
10253 return fold_builtin_printf (fndecl
, arg0
, arg1
, ignore
, fcode
);
10255 case BUILT_IN_PRINTF_CHK
:
10256 case BUILT_IN_VPRINTF_CHK
:
10257 if (!validate_arg (arg0
, INTEGER_TYPE
)
10258 || TREE_SIDE_EFFECTS (arg0
))
10261 return fold_builtin_printf (fndecl
, arg1
, NULL_TREE
, ignore
, fcode
);
10264 case BUILT_IN_FPRINTF
:
10265 case BUILT_IN_FPRINTF_UNLOCKED
:
10266 case BUILT_IN_VFPRINTF
:
10267 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, NULL_TREE
,
10276 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10277 and ARG2. IGNORE is true if the result of the function call is ignored.
10278 This function returns NULL_TREE if no simplification was possible. */
10281 fold_builtin_3 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, bool ignore
)
10283 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
10284 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10288 CASE_FLT_FN (BUILT_IN_SINCOS
):
10289 return fold_builtin_sincos (arg0
, arg1
, arg2
);
10291 CASE_FLT_FN (BUILT_IN_FMA
):
10292 if (validate_arg (arg0
, REAL_TYPE
)
10293 && validate_arg(arg1
, REAL_TYPE
)
10294 && validate_arg(arg2
, REAL_TYPE
))
10295 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
10298 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
10299 CASE_FLT_FN (BUILT_IN_REMQUO
):
10300 if (validate_arg (arg0
, REAL_TYPE
)
10301 && validate_arg(arg1
, REAL_TYPE
)
10302 && validate_arg(arg2
, POINTER_TYPE
))
10303 return do_mpfr_remquo (arg0
, arg1
, arg2
);
10307 case BUILT_IN_MEMSET
:
10308 return fold_builtin_memset (arg0
, arg1
, arg2
, type
, ignore
);
10310 case BUILT_IN_BCOPY
:
10311 return fold_builtin_memory_op (arg1
, arg0
, arg2
, void_type_node
, true, /*endp=*/3);
10313 case BUILT_IN_MEMCPY
:
10314 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/0);
10316 case BUILT_IN_MEMPCPY
:
10317 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/1);
10319 case BUILT_IN_MEMMOVE
:
10320 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/3);
10322 case BUILT_IN_STRNCAT
:
10323 return fold_builtin_strncat (arg0
, arg1
, arg2
);
10325 case BUILT_IN_STRNCPY
:
10326 return fold_builtin_strncpy (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
10328 case BUILT_IN_STRNCMP
:
10329 return fold_builtin_strncmp (arg0
, arg1
, arg2
);
10331 case BUILT_IN_MEMCHR
:
10332 return fold_builtin_memchr (arg0
, arg1
, arg2
, type
);
10334 case BUILT_IN_BCMP
:
10335 case BUILT_IN_MEMCMP
:
10336 return fold_builtin_memcmp (arg0
, arg1
, arg2
);;
10338 case BUILT_IN_SPRINTF
:
10339 return fold_builtin_sprintf (arg0
, arg1
, arg2
, ignore
);
10341 case BUILT_IN_STRCPY_CHK
:
10342 case BUILT_IN_STPCPY_CHK
:
10343 return fold_builtin_stxcpy_chk (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
10346 case BUILT_IN_STRCAT_CHK
:
10347 return fold_builtin_strcat_chk (fndecl
, arg0
, arg1
, arg2
);
10349 case BUILT_IN_PRINTF_CHK
:
10350 case BUILT_IN_VPRINTF_CHK
:
10351 if (!validate_arg (arg0
, INTEGER_TYPE
)
10352 || TREE_SIDE_EFFECTS (arg0
))
10355 return fold_builtin_printf (fndecl
, arg1
, arg2
, ignore
, fcode
);
10358 case BUILT_IN_FPRINTF
:
10359 case BUILT_IN_FPRINTF_UNLOCKED
:
10360 case BUILT_IN_VFPRINTF
:
10361 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, arg2
, ignore
, fcode
);
10363 case BUILT_IN_FPRINTF_CHK
:
10364 case BUILT_IN_VFPRINTF_CHK
:
10365 if (!validate_arg (arg1
, INTEGER_TYPE
)
10366 || TREE_SIDE_EFFECTS (arg1
))
10369 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, NULL_TREE
,
10378 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10379 ARG2, and ARG3. IGNORE is true if the result of the function call is
10380 ignored. This function returns NULL_TREE if no simplification was
10384 fold_builtin_4 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, tree arg3
,
10387 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10391 case BUILT_IN_MEMCPY_CHK
:
10392 case BUILT_IN_MEMPCPY_CHK
:
10393 case BUILT_IN_MEMMOVE_CHK
:
10394 case BUILT_IN_MEMSET_CHK
:
10395 return fold_builtin_memory_chk (fndecl
, arg0
, arg1
, arg2
, arg3
,
10397 DECL_FUNCTION_CODE (fndecl
));
10399 case BUILT_IN_STRNCPY_CHK
:
10400 return fold_builtin_strncpy_chk (arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
10402 case BUILT_IN_STRNCAT_CHK
:
10403 return fold_builtin_strncat_chk (fndecl
, arg0
, arg1
, arg2
, arg3
);
10405 case BUILT_IN_FPRINTF_CHK
:
10406 case BUILT_IN_VFPRINTF_CHK
:
10407 if (!validate_arg (arg1
, INTEGER_TYPE
)
10408 || TREE_SIDE_EFFECTS (arg1
))
10411 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, arg3
,
10421 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10422 arguments, where NARGS <= 4. IGNORE is true if the result of the
10423 function call is ignored. This function returns NULL_TREE if no
10424 simplification was possible. Note that this only folds builtins with
10425 fixed argument patterns. Foldings that do varargs-to-varargs
10426 transformations, or that match calls with more than 4 arguments,
10427 need to be handled with fold_builtin_varargs instead. */
10429 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10432 fold_builtin_n (tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10434 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10435 tree ret
= NULL_TREE
;
10437 /* Verify the number of arguments for type-generic and thus variadic
10441 case BUILT_IN_ISFINITE
:
10442 case BUILT_IN_ISINF
:
10443 case BUILT_IN_ISNAN
:
10444 case BUILT_IN_ISNORMAL
:
10447 error ("too few arguments to function %qs",
10448 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
10449 return error_mark_node
;
10451 else if (nargs
> 1)
10453 error ("too many arguments to function %qs",
10454 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
10455 return error_mark_node
;
10459 case BUILT_IN_ISGREATER
:
10460 case BUILT_IN_ISGREATEREQUAL
:
10461 case BUILT_IN_ISLESS
:
10462 case BUILT_IN_ISLESSEQUAL
:
10463 case BUILT_IN_ISLESSGREATER
:
10464 case BUILT_IN_ISUNORDERED
:
10467 error ("too few arguments to function %qs",
10468 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
10469 return error_mark_node
;
10471 else if (nargs
> 2)
10473 error ("too many arguments to function %qs",
10474 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
10475 return error_mark_node
;
10486 ret
= fold_builtin_0 (fndecl
, ignore
);
10489 ret
= fold_builtin_1 (fndecl
, args
[0], ignore
);
10492 ret
= fold_builtin_2 (fndecl
, args
[0], args
[1], ignore
);
10495 ret
= fold_builtin_3 (fndecl
, args
[0], args
[1], args
[2], ignore
);
10498 ret
= fold_builtin_4 (fndecl
, args
[0], args
[1], args
[2], args
[3],
10506 ret
= build1 (NOP_EXPR
, GENERIC_TREE_TYPE (ret
), ret
);
10507 TREE_NO_WARNING (ret
) = 1;
10513 /* Builtins with folding operations that operate on "..." arguments
10514 need special handling; we need to store the arguments in a convenient
10515 data structure before attempting any folding. Fortunately there are
10516 only a few builtins that fall into this category. FNDECL is the
10517 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10518 result of the function call is ignored. */
10521 fold_builtin_varargs (tree fndecl
, tree exp
, bool ignore ATTRIBUTE_UNUSED
)
10523 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10524 tree ret
= NULL_TREE
;
10528 case BUILT_IN_SPRINTF_CHK
:
10529 case BUILT_IN_VSPRINTF_CHK
:
10530 ret
= fold_builtin_sprintf_chk (exp
, fcode
);
10533 case BUILT_IN_SNPRINTF_CHK
:
10534 case BUILT_IN_VSNPRINTF_CHK
:
10535 ret
= fold_builtin_snprintf_chk (exp
, NULL_TREE
, fcode
);
10542 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10543 TREE_NO_WARNING (ret
) = 1;
10549 /* A wrapper function for builtin folding that prevents warnings for
10550 "statement without effect" and the like, caused by removing the
10551 call node earlier than the warning is generated. */
10554 fold_call_expr (tree exp
, bool ignore
)
10556 tree ret
= NULL_TREE
;
10557 tree fndecl
= get_callee_fndecl (exp
);
10559 && TREE_CODE (fndecl
) == FUNCTION_DECL
10560 && DECL_BUILT_IN (fndecl
)
10561 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10562 yet. Defer folding until we see all the arguments
10563 (after inlining). */
10564 && !CALL_EXPR_VA_ARG_PACK (exp
))
10566 int nargs
= call_expr_nargs (exp
);
10568 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10569 instead last argument is __builtin_va_arg_pack (). Defer folding
10570 even in that case, until arguments are finalized. */
10571 if (nargs
&& TREE_CODE (CALL_EXPR_ARG (exp
, nargs
- 1)) == CALL_EXPR
)
10573 tree fndecl2
= get_callee_fndecl (CALL_EXPR_ARG (exp
, nargs
- 1));
10575 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10576 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10577 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10581 /* FIXME: Don't use a list in this interface. */
10582 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10583 return targetm
.fold_builtin (fndecl
, CALL_EXPR_ARGS (exp
), ignore
);
10586 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10588 tree
*args
= CALL_EXPR_ARGP (exp
);
10589 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
10592 ret
= fold_builtin_varargs (fndecl
, exp
, ignore
);
10595 /* Propagate location information from original call to
10596 expansion of builtin. Otherwise things like
10597 maybe_emit_chk_warning, that operate on the expansion
10598 of a builtin, will use the wrong location information. */
10599 if (CAN_HAVE_LOCATION_P (exp
) && EXPR_HAS_LOCATION (exp
))
10601 tree realret
= ret
;
10602 if (TREE_CODE (ret
) == NOP_EXPR
)
10603 realret
= TREE_OPERAND (ret
, 0);
10604 if (CAN_HAVE_LOCATION_P (realret
)
10605 && !EXPR_HAS_LOCATION (realret
))
10606 SET_EXPR_LOCATION (realret
, EXPR_LOCATION (exp
));
10615 /* Conveniently construct a function call expression. FNDECL names the
10616 function to be called and ARGLIST is a TREE_LIST of arguments. */
10619 build_function_call_expr (tree fndecl
, tree arglist
)
10621 tree fntype
= TREE_TYPE (fndecl
);
10622 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10623 int n
= list_length (arglist
);
10624 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10627 for (i
= 0; i
< n
; i
++, arglist
= TREE_CHAIN (arglist
))
10628 argarray
[i
] = TREE_VALUE (arglist
);
10629 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10632 /* Conveniently construct a function call expression. FNDECL names the
10633 function to be called, N is the number of arguments, and the "..."
10634 parameters are the argument expressions. */
10637 build_call_expr (tree fndecl
, int n
, ...)
10640 tree fntype
= TREE_TYPE (fndecl
);
10641 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10642 tree
*argarray
= (tree
*) alloca (n
* sizeof (tree
));
10646 for (i
= 0; i
< n
; i
++)
10647 argarray
[i
] = va_arg (ap
, tree
);
10649 return fold_builtin_call_array (TREE_TYPE (fntype
), fn
, n
, argarray
);
10652 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10653 N arguments are passed in the array ARGARRAY. */
10656 fold_builtin_call_array (tree type
,
10661 tree ret
= NULL_TREE
;
10665 if (TREE_CODE (fn
) == ADDR_EXPR
)
10667 tree fndecl
= TREE_OPERAND (fn
, 0);
10668 if (TREE_CODE (fndecl
) == FUNCTION_DECL
10669 && DECL_BUILT_IN (fndecl
))
10671 /* If last argument is __builtin_va_arg_pack (), arguments to this
10672 function are not finalized yet. Defer folding until they are. */
10673 if (n
&& TREE_CODE (argarray
[n
- 1]) == CALL_EXPR
)
10675 tree fndecl2
= get_callee_fndecl (argarray
[n
- 1]);
10677 && TREE_CODE (fndecl2
) == FUNCTION_DECL
10678 && DECL_BUILT_IN_CLASS (fndecl2
) == BUILT_IN_NORMAL
10679 && DECL_FUNCTION_CODE (fndecl2
) == BUILT_IN_VA_ARG_PACK
)
10680 return build_call_array (type
, fn
, n
, argarray
);
10682 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10684 tree arglist
= NULL_TREE
;
10685 for (i
= n
- 1; i
>= 0; i
--)
10686 arglist
= tree_cons (NULL_TREE
, argarray
[i
], arglist
);
10687 ret
= targetm
.fold_builtin (fndecl
, arglist
, false);
10691 else if (n
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10693 /* First try the transformations that don't require consing up
10695 ret
= fold_builtin_n (fndecl
, argarray
, n
, false);
10700 /* If we got this far, we need to build an exp. */
10701 exp
= build_call_array (type
, fn
, n
, argarray
);
10702 ret
= fold_builtin_varargs (fndecl
, exp
, false);
10703 return ret
? ret
: exp
;
10707 return build_call_array (type
, fn
, n
, argarray
);
10710 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10711 along with N new arguments specified as the "..." parameters. SKIP
10712 is the number of arguments in EXP to be omitted. This function is used
10713 to do varargs-to-varargs transformations. */
10716 rewrite_call_expr (tree exp
, int skip
, tree fndecl
, int n
, ...)
10718 int oldnargs
= call_expr_nargs (exp
);
10719 int nargs
= oldnargs
- skip
+ n
;
10720 tree fntype
= TREE_TYPE (fndecl
);
10721 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
10729 buffer
= alloca (nargs
* sizeof (tree
));
10731 for (i
= 0; i
< n
; i
++)
10732 buffer
[i
] = va_arg (ap
, tree
);
10734 for (j
= skip
; j
< oldnargs
; j
++, i
++)
10735 buffer
[i
] = CALL_EXPR_ARG (exp
, j
);
10738 buffer
= CALL_EXPR_ARGP (exp
) + skip
;
10740 return fold (build_call_array (TREE_TYPE (exp
), fn
, nargs
, buffer
));
10743 /* Validate a single argument ARG against a tree code CODE representing
10747 validate_arg (const_tree arg
, enum tree_code code
)
10751 else if (code
== POINTER_TYPE
)
10752 return POINTER_TYPE_P (TREE_TYPE (arg
));
10753 return code
== TREE_CODE (TREE_TYPE (arg
));
10756 /* This function validates the types of a function call argument list
10757 against a specified list of tree_codes. If the last specifier is a 0,
10758 that represents an ellipses, otherwise the last specifier must be a
10762 validate_arglist (const_tree callexpr
, ...)
10764 enum tree_code code
;
10767 const_call_expr_arg_iterator iter
;
10770 va_start (ap
, callexpr
);
10771 init_const_call_expr_arg_iterator (callexpr
, &iter
);
10775 code
= va_arg (ap
, enum tree_code
);
10779 /* This signifies an ellipses, any further arguments are all ok. */
10783 /* This signifies an endlink, if no arguments remain, return
10784 true, otherwise return false. */
10785 res
= !more_const_call_expr_args_p (&iter
);
10788 /* If no parameters remain or the parameter's code does not
10789 match the specified code, return false. Otherwise continue
10790 checking any remaining arguments. */
10791 arg
= next_const_call_expr_arg (&iter
);
10792 if (!validate_arg (arg
, code
))
10799 /* We need gotos here since we can only have one VA_CLOSE in a
10807 /* Default target-specific builtin expander that does nothing. */
10810 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10811 rtx target ATTRIBUTE_UNUSED
,
10812 rtx subtarget ATTRIBUTE_UNUSED
,
10813 enum machine_mode mode ATTRIBUTE_UNUSED
,
10814 int ignore ATTRIBUTE_UNUSED
)
10819 /* Returns true is EXP represents data that would potentially reside
10820 in a readonly section. */
10823 readonly_data_expr (tree exp
)
10827 if (TREE_CODE (exp
) != ADDR_EXPR
)
10830 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10834 /* Make sure we call decl_readonly_section only for trees it
10835 can handle (since it returns true for everything it doesn't
10837 if (TREE_CODE (exp
) == STRING_CST
10838 || TREE_CODE (exp
) == CONSTRUCTOR
10839 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
10840 return decl_readonly_section (exp
, 0);
10845 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10846 to the call, and TYPE is its return type.
10848 Return NULL_TREE if no simplification was possible, otherwise return the
10849 simplified form of the call as a tree.
10851 The simplified form may be a constant or other expression which
10852 computes the same value, but in a more efficient manner (including
10853 calls to other builtin functions).
10855 The call may contain arguments which need to be evaluated, but
10856 which are not useful to determine the result of the call. In
10857 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10858 COMPOUND_EXPR will be an argument which must be evaluated.
10859 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10860 COMPOUND_EXPR in the chain will contain the tree for the simplified
10861 form of the builtin function call. */
10864 fold_builtin_strstr (tree s1
, tree s2
, tree type
)
10866 if (!validate_arg (s1
, POINTER_TYPE
)
10867 || !validate_arg (s2
, POINTER_TYPE
))
10872 const char *p1
, *p2
;
10874 p2
= c_getstr (s2
);
10878 p1
= c_getstr (s1
);
10881 const char *r
= strstr (p1
, p2
);
10885 return build_int_cst (TREE_TYPE (s1
), 0);
10887 /* Return an offset into the constant string argument. */
10888 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
10889 s1
, size_int (r
- p1
));
10890 return fold_convert (type
, tem
);
10893 /* The argument is const char *, and the result is char *, so we need
10894 a type conversion here to avoid a warning. */
10896 return fold_convert (type
, s1
);
10901 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10905 /* New argument list transforming strstr(s1, s2) to
10906 strchr(s1, s2[0]). */
10907 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
10911 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10912 the call, and TYPE is its return type.
10914 Return NULL_TREE if no simplification was possible, otherwise return the
10915 simplified form of the call as a tree.
10917 The simplified form may be a constant or other expression which
10918 computes the same value, but in a more efficient manner (including
10919 calls to other builtin functions).
10921 The call may contain arguments which need to be evaluated, but
10922 which are not useful to determine the result of the call. In
10923 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10924 COMPOUND_EXPR will be an argument which must be evaluated.
10925 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10926 COMPOUND_EXPR in the chain will contain the tree for the simplified
10927 form of the builtin function call. */
10930 fold_builtin_strchr (tree s1
, tree s2
, tree type
)
10932 if (!validate_arg (s1
, POINTER_TYPE
)
10933 || !validate_arg (s2
, INTEGER_TYPE
))
10939 if (TREE_CODE (s2
) != INTEGER_CST
)
10942 p1
= c_getstr (s1
);
10949 if (target_char_cast (s2
, &c
))
10952 r
= strchr (p1
, c
);
10955 return build_int_cst (TREE_TYPE (s1
), 0);
10957 /* Return an offset into the constant string argument. */
10958 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
10959 s1
, size_int (r
- p1
));
10960 return fold_convert (type
, tem
);
10966 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10967 the call, and TYPE is its return type.
10969 Return NULL_TREE if no simplification was possible, otherwise return the
10970 simplified form of the call as a tree.
10972 The simplified form may be a constant or other expression which
10973 computes the same value, but in a more efficient manner (including
10974 calls to other builtin functions).
10976 The call may contain arguments which need to be evaluated, but
10977 which are not useful to determine the result of the call. In
10978 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10979 COMPOUND_EXPR will be an argument which must be evaluated.
10980 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10981 COMPOUND_EXPR in the chain will contain the tree for the simplified
10982 form of the builtin function call. */
10985 fold_builtin_strrchr (tree s1
, tree s2
, tree type
)
10987 if (!validate_arg (s1
, POINTER_TYPE
)
10988 || !validate_arg (s2
, INTEGER_TYPE
))
10995 if (TREE_CODE (s2
) != INTEGER_CST
)
10998 p1
= c_getstr (s1
);
11005 if (target_char_cast (s2
, &c
))
11008 r
= strrchr (p1
, c
);
11011 return build_int_cst (TREE_TYPE (s1
), 0);
11013 /* Return an offset into the constant string argument. */
11014 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11015 s1
, size_int (r
- p1
));
11016 return fold_convert (type
, tem
);
11019 if (! integer_zerop (s2
))
11022 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11026 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11027 return build_call_expr (fn
, 2, s1
, s2
);
11031 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11032 to the call, and TYPE is its return type.
11034 Return NULL_TREE if no simplification was possible, otherwise return the
11035 simplified form of the call as a tree.
11037 The simplified form may be a constant or other expression which
11038 computes the same value, but in a more efficient manner (including
11039 calls to other builtin functions).
11041 The call may contain arguments which need to be evaluated, but
11042 which are not useful to determine the result of the call. In
11043 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11044 COMPOUND_EXPR will be an argument which must be evaluated.
11045 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11046 COMPOUND_EXPR in the chain will contain the tree for the simplified
11047 form of the builtin function call. */
11050 fold_builtin_strpbrk (tree s1
, tree s2
, tree type
)
11052 if (!validate_arg (s1
, POINTER_TYPE
)
11053 || !validate_arg (s2
, POINTER_TYPE
))
11058 const char *p1
, *p2
;
11060 p2
= c_getstr (s2
);
11064 p1
= c_getstr (s1
);
11067 const char *r
= strpbrk (p1
, p2
);
11071 return build_int_cst (TREE_TYPE (s1
), 0);
11073 /* Return an offset into the constant string argument. */
11074 tem
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (s1
),
11075 s1
, size_int (r
- p1
));
11076 return fold_convert (type
, tem
);
11080 /* strpbrk(x, "") == NULL.
11081 Evaluate and ignore s1 in case it had side-effects. */
11082 return omit_one_operand (TREE_TYPE (s1
), integer_zero_node
, s1
);
11085 return NULL_TREE
; /* Really call strpbrk. */
11087 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
11091 /* New argument list transforming strpbrk(s1, s2) to
11092 strchr(s1, s2[0]). */
11093 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
11097 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11100 Return NULL_TREE if no simplification was possible, otherwise return the
11101 simplified form of the call as a tree.
11103 The simplified form may be a constant or other expression which
11104 computes the same value, but in a more efficient manner (including
11105 calls to other builtin functions).
11107 The call may contain arguments which need to be evaluated, but
11108 which are not useful to determine the result of the call. In
11109 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11110 COMPOUND_EXPR will be an argument which must be evaluated.
11111 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11112 COMPOUND_EXPR in the chain will contain the tree for the simplified
11113 form of the builtin function call. */
11116 fold_builtin_strcat (tree dst
, tree src
)
11118 if (!validate_arg (dst
, POINTER_TYPE
)
11119 || !validate_arg (src
, POINTER_TYPE
))
11123 const char *p
= c_getstr (src
);
11125 /* If the string length is zero, return the dst parameter. */
11126 if (p
&& *p
== '\0')
11133 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11134 arguments to the call.
11136 Return NULL_TREE if no simplification was possible, otherwise return the
11137 simplified form of the call as a tree.
11139 The simplified form may be a constant or other expression which
11140 computes the same value, but in a more efficient manner (including
11141 calls to other builtin functions).
11143 The call may contain arguments which need to be evaluated, but
11144 which are not useful to determine the result of the call. In
11145 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11146 COMPOUND_EXPR will be an argument which must be evaluated.
11147 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11148 COMPOUND_EXPR in the chain will contain the tree for the simplified
11149 form of the builtin function call. */
11152 fold_builtin_strncat (tree dst
, tree src
, tree len
)
11154 if (!validate_arg (dst
, POINTER_TYPE
)
11155 || !validate_arg (src
, POINTER_TYPE
)
11156 || !validate_arg (len
, INTEGER_TYPE
))
11160 const char *p
= c_getstr (src
);
11162 /* If the requested length is zero, or the src parameter string
11163 length is zero, return the dst parameter. */
11164 if (integer_zerop (len
) || (p
&& *p
== '\0'))
11165 return omit_two_operands (TREE_TYPE (dst
), dst
, src
, len
);
11167 /* If the requested len is greater than or equal to the string
11168 length, call strcat. */
11169 if (TREE_CODE (len
) == INTEGER_CST
&& p
11170 && compare_tree_int (len
, strlen (p
)) >= 0)
11172 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
11174 /* If the replacement _DECL isn't initialized, don't do the
11179 return build_call_expr (fn
, 2, dst
, src
);
11185 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11188 Return NULL_TREE if no simplification was possible, otherwise return the
11189 simplified form of the call as a tree.
11191 The simplified form may be a constant or other expression which
11192 computes the same value, but in a more efficient manner (including
11193 calls to other builtin functions).
11195 The call may contain arguments which need to be evaluated, but
11196 which are not useful to determine the result of the call. In
11197 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11198 COMPOUND_EXPR will be an argument which must be evaluated.
11199 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11200 COMPOUND_EXPR in the chain will contain the tree for the simplified
11201 form of the builtin function call. */
11204 fold_builtin_strspn (tree s1
, tree s2
)
11206 if (!validate_arg (s1
, POINTER_TYPE
)
11207 || !validate_arg (s2
, POINTER_TYPE
))
11211 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11213 /* If both arguments are constants, evaluate at compile-time. */
11216 const size_t r
= strspn (p1
, p2
);
11217 return size_int (r
);
11220 /* If either argument is "", return NULL_TREE. */
11221 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
11222 /* Evaluate and ignore both arguments in case either one has
11224 return omit_two_operands (integer_type_node
, integer_zero_node
,
11230 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11233 Return NULL_TREE if no simplification was possible, otherwise return the
11234 simplified form of the call as a tree.
11236 The simplified form may be a constant or other expression which
11237 computes the same value, but in a more efficient manner (including
11238 calls to other builtin functions).
11240 The call may contain arguments which need to be evaluated, but
11241 which are not useful to determine the result of the call. In
11242 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11243 COMPOUND_EXPR will be an argument which must be evaluated.
11244 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11245 COMPOUND_EXPR in the chain will contain the tree for the simplified
11246 form of the builtin function call. */
11249 fold_builtin_strcspn (tree s1
, tree s2
)
11251 if (!validate_arg (s1
, POINTER_TYPE
)
11252 || !validate_arg (s2
, POINTER_TYPE
))
11256 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
11258 /* If both arguments are constants, evaluate at compile-time. */
11261 const size_t r
= strcspn (p1
, p2
);
11262 return size_int (r
);
11265 /* If the first argument is "", return NULL_TREE. */
11266 if (p1
&& *p1
== '\0')
11268 /* Evaluate and ignore argument s2 in case it has
11270 return omit_one_operand (integer_type_node
,
11271 integer_zero_node
, s2
);
11274 /* If the second argument is "", return __builtin_strlen(s1). */
11275 if (p2
&& *p2
== '\0')
11277 tree fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
11279 /* If the replacement _DECL isn't initialized, don't do the
11284 return build_call_expr (fn
, 1, s1
);
11290 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11291 to the call. IGNORE is true if the value returned
11292 by the builtin will be ignored. UNLOCKED is true is true if this
11293 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
11294 the known length of the string. Return NULL_TREE if no simplification
11298 fold_builtin_fputs (tree arg0
, tree arg1
, bool ignore
, bool unlocked
, tree len
)
11300 /* If we're using an unlocked function, assume the other unlocked
11301 functions exist explicitly. */
11302 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
11303 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
11304 tree
const fn_fwrite
= unlocked
? built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
11305 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
11307 /* If the return value is used, don't do the transformation. */
11311 /* Verify the arguments in the original call. */
11312 if (!validate_arg (arg0
, POINTER_TYPE
)
11313 || !validate_arg (arg1
, POINTER_TYPE
))
11317 len
= c_strlen (arg0
, 0);
11319 /* Get the length of the string passed to fputs. If the length
11320 can't be determined, punt. */
11322 || TREE_CODE (len
) != INTEGER_CST
)
11325 switch (compare_tree_int (len
, 1))
11327 case -1: /* length is 0, delete the call entirely . */
11328 return omit_one_operand (integer_type_node
, integer_zero_node
, arg1
);;
11330 case 0: /* length is 1, call fputc. */
11332 const char *p
= c_getstr (arg0
);
11337 return build_call_expr (fn_fputc
, 2,
11338 build_int_cst (NULL_TREE
, p
[0]), arg1
);
11344 case 1: /* length is greater than 1, call fwrite. */
11346 /* If optimizing for size keep fputs. */
11349 /* New argument list transforming fputs(string, stream) to
11350 fwrite(string, 1, len, stream). */
11352 return build_call_expr (fn_fwrite
, 4, arg0
, size_one_node
, len
, arg1
);
11357 gcc_unreachable ();
11362 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
11363 produced. False otherwise. This is done so that we don't output the error
11364 or warning twice or three times. */
11366 fold_builtin_next_arg (tree exp
, bool va_start_p
)
11368 tree fntype
= TREE_TYPE (current_function_decl
);
11369 int nargs
= call_expr_nargs (exp
);
11372 if (TYPE_ARG_TYPES (fntype
) == 0
11373 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
11374 == void_type_node
))
11376 error ("%<va_start%> used in function with fixed args");
11382 if (va_start_p
&& (nargs
!= 2))
11384 error ("wrong number of arguments to function %<va_start%>");
11387 arg
= CALL_EXPR_ARG (exp
, 1);
11389 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11390 when we checked the arguments and if needed issued a warning. */
11395 /* Evidently an out of date version of <stdarg.h>; can't validate
11396 va_start's second argument, but can still work as intended. */
11397 warning (0, "%<__builtin_next_arg%> called without an argument");
11400 else if (nargs
> 1)
11402 error ("wrong number of arguments to function %<__builtin_next_arg%>");
11405 arg
= CALL_EXPR_ARG (exp
, 0);
11408 /* We destructively modify the call to be __builtin_va_start (ap, 0)
11409 or __builtin_next_arg (0) the first time we see it, after checking
11410 the arguments and if needed issuing a warning. */
11411 if (!integer_zerop (arg
))
11413 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
11415 /* Strip off all nops for the sake of the comparison. This
11416 is not quite the same as STRIP_NOPS. It does more.
11417 We must also strip off INDIRECT_EXPR for C++ reference
11419 while (TREE_CODE (arg
) == NOP_EXPR
11420 || TREE_CODE (arg
) == CONVERT_EXPR
11421 || TREE_CODE (arg
) == NON_LVALUE_EXPR
11422 || TREE_CODE (arg
) == INDIRECT_REF
)
11423 arg
= TREE_OPERAND (arg
, 0);
11424 if (arg
!= last_parm
)
11426 /* FIXME: Sometimes with the tree optimizers we can get the
11427 not the last argument even though the user used the last
11428 argument. We just warn and set the arg to be the last
11429 argument so that we will get wrong-code because of
11431 warning (0, "second parameter of %<va_start%> not last named argument");
11433 /* We want to verify the second parameter just once before the tree
11434 optimizers are run and then avoid keeping it in the tree,
11435 as otherwise we could warn even for correct code like:
11436 void foo (int i, ...)
11437 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
11439 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
11441 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
11447 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
11448 ORIG may be null if this is a 2-argument call. We don't attempt to
11449 simplify calls with more than 3 arguments.
11451 Return NULL_TREE if no simplification was possible, otherwise return the
11452 simplified form of the call as a tree. If IGNORED is true, it means that
11453 the caller does not use the returned value of the function. */
11456 fold_builtin_sprintf (tree dest
, tree fmt
, tree orig
, int ignored
)
11459 const char *fmt_str
= NULL
;
11461 /* Verify the required arguments in the original call. We deal with two
11462 types of sprintf() calls: 'sprintf (str, fmt)' and
11463 'sprintf (dest, "%s", orig)'. */
11464 if (!validate_arg (dest
, POINTER_TYPE
)
11465 || !validate_arg (fmt
, POINTER_TYPE
))
11467 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
11470 /* Check whether the format is a literal string constant. */
11471 fmt_str
= c_getstr (fmt
);
11472 if (fmt_str
== NULL
)
11476 retval
= NULL_TREE
;
11478 if (!init_target_chars ())
11481 /* If the format doesn't contain % args or %%, use strcpy. */
11482 if (strchr (fmt_str
, target_percent
) == NULL
)
11484 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11489 /* Don't optimize sprintf (buf, "abc", ptr++). */
11493 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
11494 'format' is known to contain no % formats. */
11495 call
= build_call_expr (fn
, 2, dest
, fmt
);
11497 retval
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
11500 /* If the format is "%s", use strcpy if the result isn't used. */
11501 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
11504 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11509 /* Don't crash on sprintf (str1, "%s"). */
11513 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11516 retval
= c_strlen (orig
, 1);
11517 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
11520 call
= build_call_expr (fn
, 2, dest
, orig
);
11523 if (call
&& retval
)
11525 retval
= fold_convert
11526 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls
[BUILT_IN_SPRINTF
])),
11528 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
11534 /* Expand a call EXP to __builtin_object_size. */
11537 expand_builtin_object_size (tree exp
)
11540 int object_size_type
;
11541 tree fndecl
= get_callee_fndecl (exp
);
11543 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11545 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11547 expand_builtin_trap ();
11551 ost
= CALL_EXPR_ARG (exp
, 1);
11554 if (TREE_CODE (ost
) != INTEGER_CST
11555 || tree_int_cst_sgn (ost
) < 0
11556 || compare_tree_int (ost
, 3) > 0)
11558 error ("%Klast argument of %D is not integer constant between 0 and 3",
11560 expand_builtin_trap ();
11564 object_size_type
= tree_low_cst (ost
, 0);
11566 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11569 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11570 FCODE is the BUILT_IN_* to use.
11571 Return NULL_RTX if we failed; the caller should emit a normal call,
11572 otherwise try to get the result in TARGET, if convenient (and in
11573 mode MODE if that's convenient). */
11576 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
11577 enum built_in_function fcode
)
11579 tree dest
, src
, len
, size
;
11581 if (!validate_arglist (exp
,
11583 fcode
== BUILT_IN_MEMSET_CHK
11584 ? INTEGER_TYPE
: POINTER_TYPE
,
11585 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11588 dest
= CALL_EXPR_ARG (exp
, 0);
11589 src
= CALL_EXPR_ARG (exp
, 1);
11590 len
= CALL_EXPR_ARG (exp
, 2);
11591 size
= CALL_EXPR_ARG (exp
, 3);
11593 if (! host_integerp (size
, 1))
11596 if (host_integerp (len
, 1) || integer_all_onesp (size
))
11600 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11602 warning (0, "%Kcall to %D will always overflow destination buffer",
11603 exp
, get_callee_fndecl (exp
));
11608 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11609 mem{cpy,pcpy,move,set} is available. */
11612 case BUILT_IN_MEMCPY_CHK
:
11613 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11615 case BUILT_IN_MEMPCPY_CHK
:
11616 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11618 case BUILT_IN_MEMMOVE_CHK
:
11619 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11621 case BUILT_IN_MEMSET_CHK
:
11622 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11631 fn
= build_call_expr (fn
, 3, dest
, src
, len
);
11632 STRIP_TYPE_NOPS (fn
);
11633 while (TREE_CODE (fn
) == COMPOUND_EXPR
)
11635 expand_expr (TREE_OPERAND (fn
, 0), const0_rtx
, VOIDmode
,
11637 fn
= TREE_OPERAND (fn
, 1);
11639 if (TREE_CODE (fn
) == CALL_EXPR
)
11640 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11641 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11643 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11647 unsigned int dest_align
11648 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
11650 /* If DEST is not a pointer type, call the normal function. */
11651 if (dest_align
== 0)
11654 /* If SRC and DEST are the same (and not volatile), do nothing. */
11655 if (operand_equal_p (src
, dest
, 0))
11659 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11661 /* Evaluate and ignore LEN in case it has side-effects. */
11662 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11663 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11666 expr
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11667 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11670 /* __memmove_chk special case. */
11671 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11673 unsigned int src_align
11674 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
11676 if (src_align
== 0)
11679 /* If src is categorized for a readonly section we can use
11680 normal __memcpy_chk. */
11681 if (readonly_data_expr (src
))
11683 tree fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11686 fn
= build_call_expr (fn
, 4, dest
, src
, len
, size
);
11687 STRIP_TYPE_NOPS (fn
);
11688 while (TREE_CODE (fn
) == COMPOUND_EXPR
)
11690 expand_expr (TREE_OPERAND (fn
, 0), const0_rtx
, VOIDmode
,
11692 fn
= TREE_OPERAND (fn
, 1);
11694 if (TREE_CODE (fn
) == CALL_EXPR
)
11695 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11696 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11703 /* Emit warning if a buffer overflow is detected at compile time. */
11706 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11713 case BUILT_IN_STRCPY_CHK
:
11714 case BUILT_IN_STPCPY_CHK
:
11715 /* For __strcat_chk the warning will be emitted only if overflowing
11716 by at least strlen (dest) + 1 bytes. */
11717 case BUILT_IN_STRCAT_CHK
:
11718 len
= CALL_EXPR_ARG (exp
, 1);
11719 size
= CALL_EXPR_ARG (exp
, 2);
11722 case BUILT_IN_STRNCAT_CHK
:
11723 case BUILT_IN_STRNCPY_CHK
:
11724 len
= CALL_EXPR_ARG (exp
, 2);
11725 size
= CALL_EXPR_ARG (exp
, 3);
11727 case BUILT_IN_SNPRINTF_CHK
:
11728 case BUILT_IN_VSNPRINTF_CHK
:
11729 len
= CALL_EXPR_ARG (exp
, 1);
11730 size
= CALL_EXPR_ARG (exp
, 3);
11733 gcc_unreachable ();
11739 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11744 len
= c_strlen (len
, 1);
11745 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11748 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11750 tree src
= CALL_EXPR_ARG (exp
, 1);
11751 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11753 src
= c_strlen (src
, 1);
11754 if (! src
|| ! host_integerp (src
, 1))
11756 warning (0, "%Kcall to %D might overflow destination buffer",
11757 exp
, get_callee_fndecl (exp
));
11760 else if (tree_int_cst_lt (src
, size
))
11763 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
11766 warning (0, "%Kcall to %D will always overflow destination buffer",
11767 exp
, get_callee_fndecl (exp
));
11770 /* Emit warning if a buffer overflow is detected at compile time
11771 in __sprintf_chk/__vsprintf_chk calls. */
11774 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11776 tree dest
, size
, len
, fmt
, flag
;
11777 const char *fmt_str
;
11778 int nargs
= call_expr_nargs (exp
);
11780 /* Verify the required arguments in the original call. */
11784 dest
= CALL_EXPR_ARG (exp
, 0);
11785 flag
= CALL_EXPR_ARG (exp
, 1);
11786 size
= CALL_EXPR_ARG (exp
, 2);
11787 fmt
= CALL_EXPR_ARG (exp
, 3);
11789 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11792 /* Check whether the format is a literal string constant. */
11793 fmt_str
= c_getstr (fmt
);
11794 if (fmt_str
== NULL
)
11797 if (!init_target_chars ())
11800 /* If the format doesn't contain % args or %%, we know its size. */
11801 if (strchr (fmt_str
, target_percent
) == 0)
11802 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11803 /* If the format is "%s" and first ... argument is a string literal,
11805 else if (fcode
== BUILT_IN_SPRINTF_CHK
11806 && strcmp (fmt_str
, target_percent_s
) == 0)
11812 arg
= CALL_EXPR_ARG (exp
, 4);
11813 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11816 len
= c_strlen (arg
, 1);
11817 if (!len
|| ! host_integerp (len
, 1))
11823 if (! tree_int_cst_lt (len
, size
))
11825 warning (0, "%Kcall to %D will always overflow destination buffer",
11826 exp
, get_callee_fndecl (exp
));
11830 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11834 fold_builtin_object_size (tree ptr
, tree ost
)
11836 tree ret
= NULL_TREE
;
11837 int object_size_type
;
11839 if (!validate_arg (ptr
, POINTER_TYPE
)
11840 || !validate_arg (ost
, INTEGER_TYPE
))
11845 if (TREE_CODE (ost
) != INTEGER_CST
11846 || tree_int_cst_sgn (ost
) < 0
11847 || compare_tree_int (ost
, 3) > 0)
11850 object_size_type
= tree_low_cst (ost
, 0);
11852 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11853 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11854 and (size_t) 0 for types 2 and 3. */
11855 if (TREE_SIDE_EFFECTS (ptr
))
11856 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
11858 if (TREE_CODE (ptr
) == ADDR_EXPR
)
11859 ret
= build_int_cstu (size_type_node
,
11860 compute_builtin_object_size (ptr
, object_size_type
));
11862 else if (TREE_CODE (ptr
) == SSA_NAME
)
11864 unsigned HOST_WIDE_INT bytes
;
11866 /* If object size is not known yet, delay folding until
11867 later. Maybe subsequent passes will help determining
11869 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11870 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2
11872 ret
= build_int_cstu (size_type_node
, bytes
);
11877 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (ret
);
11878 HOST_WIDE_INT high
= TREE_INT_CST_HIGH (ret
);
11879 if (fit_double_type (low
, high
, &low
, &high
, TREE_TYPE (ret
)))
11886 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11887 DEST, SRC, LEN, and SIZE are the arguments to the call.
11888 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11889 code of the builtin. If MAXLEN is not NULL, it is maximum length
11890 passed as third argument. */
11893 fold_builtin_memory_chk (tree fndecl
,
11894 tree dest
, tree src
, tree len
, tree size
,
11895 tree maxlen
, bool ignore
,
11896 enum built_in_function fcode
)
11900 if (!validate_arg (dest
, POINTER_TYPE
)
11901 || !validate_arg (src
,
11902 (fcode
== BUILT_IN_MEMSET_CHK
11903 ? INTEGER_TYPE
: POINTER_TYPE
))
11904 || !validate_arg (len
, INTEGER_TYPE
)
11905 || !validate_arg (size
, INTEGER_TYPE
))
11908 /* If SRC and DEST are the same (and not volatile), return DEST
11909 (resp. DEST+LEN for __mempcpy_chk). */
11910 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
11912 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11913 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
11916 tree temp
= fold_build2 (POINTER_PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11917 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
11921 if (! host_integerp (size
, 1))
11924 if (! integer_all_onesp (size
))
11926 if (! host_integerp (len
, 1))
11928 /* If LEN is not constant, try MAXLEN too.
11929 For MAXLEN only allow optimizing into non-_ocs function
11930 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11931 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11933 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
11935 /* (void) __mempcpy_chk () can be optimized into
11936 (void) __memcpy_chk (). */
11937 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11941 return build_call_expr (fn
, 4, dest
, src
, len
, size
);
11949 if (tree_int_cst_lt (size
, maxlen
))
11954 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11955 mem{cpy,pcpy,move,set} is available. */
11958 case BUILT_IN_MEMCPY_CHK
:
11959 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11961 case BUILT_IN_MEMPCPY_CHK
:
11962 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11964 case BUILT_IN_MEMMOVE_CHK
:
11965 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11967 case BUILT_IN_MEMSET_CHK
:
11968 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11977 return build_call_expr (fn
, 3, dest
, src
, len
);
11980 /* Fold a call to the __st[rp]cpy_chk builtin.
11981 DEST, SRC, and SIZE are the arguments to the call.
11982 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11983 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11984 strings passed as second argument. */
11987 fold_builtin_stxcpy_chk (tree fndecl
, tree dest
, tree src
, tree size
,
11988 tree maxlen
, bool ignore
,
11989 enum built_in_function fcode
)
11993 if (!validate_arg (dest
, POINTER_TYPE
)
11994 || !validate_arg (src
, POINTER_TYPE
)
11995 || !validate_arg (size
, INTEGER_TYPE
))
11998 /* If SRC and DEST are the same (and not volatile), return DEST. */
11999 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
12000 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
12002 if (! host_integerp (size
, 1))
12005 if (! integer_all_onesp (size
))
12007 len
= c_strlen (src
, 1);
12008 if (! len
|| ! host_integerp (len
, 1))
12010 /* If LEN is not constant, try MAXLEN too.
12011 For MAXLEN only allow optimizing into non-_ocs function
12012 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12013 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12015 if (fcode
== BUILT_IN_STPCPY_CHK
)
12020 /* If return value of __stpcpy_chk is ignored,
12021 optimize into __strcpy_chk. */
12022 fn
= built_in_decls
[BUILT_IN_STRCPY_CHK
];
12026 return build_call_expr (fn
, 3, dest
, src
, size
);
12029 if (! len
|| TREE_SIDE_EFFECTS (len
))
12032 /* If c_strlen returned something, but not a constant,
12033 transform __strcpy_chk into __memcpy_chk. */
12034 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
12038 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
12039 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
12040 build_call_expr (fn
, 4,
12041 dest
, src
, len
, size
));
12047 if (! tree_int_cst_lt (maxlen
, size
))
12051 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12052 fn
= built_in_decls
[fcode
== BUILT_IN_STPCPY_CHK
12053 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
];
12057 return build_call_expr (fn
, 2, dest
, src
);
12060 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
12061 are the arguments to the call. If MAXLEN is not NULL, it is maximum
12062 length passed as third argument. */
12065 fold_builtin_strncpy_chk (tree dest
, tree src
, tree len
, tree size
,
12070 if (!validate_arg (dest
, POINTER_TYPE
)
12071 || !validate_arg (src
, POINTER_TYPE
)
12072 || !validate_arg (len
, INTEGER_TYPE
)
12073 || !validate_arg (size
, INTEGER_TYPE
))
12076 if (! host_integerp (size
, 1))
12079 if (! integer_all_onesp (size
))
12081 if (! host_integerp (len
, 1))
12083 /* If LEN is not constant, try MAXLEN too.
12084 For MAXLEN only allow optimizing into non-_ocs function
12085 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12086 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12092 if (tree_int_cst_lt (size
, maxlen
))
12096 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
12097 fn
= built_in_decls
[BUILT_IN_STRNCPY
];
12101 return build_call_expr (fn
, 3, dest
, src
, len
);
12104 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12105 are the arguments to the call. */
12108 fold_builtin_strcat_chk (tree fndecl
, tree dest
, tree src
, tree size
)
12113 if (!validate_arg (dest
, POINTER_TYPE
)
12114 || !validate_arg (src
, POINTER_TYPE
)
12115 || !validate_arg (size
, INTEGER_TYPE
))
12118 p
= c_getstr (src
);
12119 /* If the SRC parameter is "", return DEST. */
12120 if (p
&& *p
== '\0')
12121 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12123 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
12126 /* If __builtin_strcat_chk is used, assume strcat is available. */
12127 fn
= built_in_decls
[BUILT_IN_STRCAT
];
12131 return build_call_expr (fn
, 2, dest
, src
);
12134 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12138 fold_builtin_strncat_chk (tree fndecl
,
12139 tree dest
, tree src
, tree len
, tree size
)
12144 if (!validate_arg (dest
, POINTER_TYPE
)
12145 || !validate_arg (src
, POINTER_TYPE
)
12146 || !validate_arg (size
, INTEGER_TYPE
)
12147 || !validate_arg (size
, INTEGER_TYPE
))
12150 p
= c_getstr (src
);
12151 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
12152 if (p
&& *p
== '\0')
12153 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
12154 else if (integer_zerop (len
))
12155 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
12157 if (! host_integerp (size
, 1))
12160 if (! integer_all_onesp (size
))
12162 tree src_len
= c_strlen (src
, 1);
12164 && host_integerp (src_len
, 1)
12165 && host_integerp (len
, 1)
12166 && ! tree_int_cst_lt (len
, src_len
))
12168 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
12169 fn
= built_in_decls
[BUILT_IN_STRCAT_CHK
];
12173 return build_call_expr (fn
, 3, dest
, src
, size
);
12178 /* If __builtin_strncat_chk is used, assume strncat is available. */
12179 fn
= built_in_decls
[BUILT_IN_STRNCAT
];
12183 return build_call_expr (fn
, 3, dest
, src
, len
);
12186 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
12187 a normal call should be emitted rather than expanding the function
12188 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
12191 fold_builtin_sprintf_chk (tree exp
, enum built_in_function fcode
)
12193 tree dest
, size
, len
, fn
, fmt
, flag
;
12194 const char *fmt_str
;
12195 int nargs
= call_expr_nargs (exp
);
12197 /* Verify the required arguments in the original call. */
12200 dest
= CALL_EXPR_ARG (exp
, 0);
12201 if (!validate_arg (dest
, POINTER_TYPE
))
12203 flag
= CALL_EXPR_ARG (exp
, 1);
12204 if (!validate_arg (flag
, INTEGER_TYPE
))
12206 size
= CALL_EXPR_ARG (exp
, 2);
12207 if (!validate_arg (size
, INTEGER_TYPE
))
12209 fmt
= CALL_EXPR_ARG (exp
, 3);
12210 if (!validate_arg (fmt
, POINTER_TYPE
))
12213 if (! host_integerp (size
, 1))
12218 if (!init_target_chars ())
12221 /* Check whether the format is a literal string constant. */
12222 fmt_str
= c_getstr (fmt
);
12223 if (fmt_str
!= NULL
)
12225 /* If the format doesn't contain % args or %%, we know the size. */
12226 if (strchr (fmt_str
, target_percent
) == 0)
12228 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
12229 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
12231 /* If the format is "%s" and first ... argument is a string literal,
12232 we know the size too. */
12233 else if (fcode
== BUILT_IN_SPRINTF_CHK
12234 && strcmp (fmt_str
, target_percent_s
) == 0)
12240 arg
= CALL_EXPR_ARG (exp
, 4);
12241 if (validate_arg (arg
, POINTER_TYPE
))
12243 len
= c_strlen (arg
, 1);
12244 if (! len
|| ! host_integerp (len
, 1))
12251 if (! integer_all_onesp (size
))
12253 if (! len
|| ! tree_int_cst_lt (len
, size
))
12257 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
12258 or if format doesn't contain % chars or is "%s". */
12259 if (! integer_zerop (flag
))
12261 if (fmt_str
== NULL
)
12263 if (strchr (fmt_str
, target_percent
) != NULL
12264 && strcmp (fmt_str
, target_percent_s
))
12268 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
12269 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
12270 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
12274 return rewrite_call_expr (exp
, 4, fn
, 2, dest
, fmt
);
12277 /* Fold a call EXP to {,v}snprintf. Return NULL_TREE if
12278 a normal call should be emitted rather than expanding the function
12279 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
12280 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
12281 passed as second argument. */
12284 fold_builtin_snprintf_chk (tree exp
, tree maxlen
,
12285 enum built_in_function fcode
)
12287 tree dest
, size
, len
, fn
, fmt
, flag
;
12288 const char *fmt_str
;
12290 /* Verify the required arguments in the original call. */
12291 if (call_expr_nargs (exp
) < 5)
12293 dest
= CALL_EXPR_ARG (exp
, 0);
12294 if (!validate_arg (dest
, POINTER_TYPE
))
12296 len
= CALL_EXPR_ARG (exp
, 1);
12297 if (!validate_arg (len
, INTEGER_TYPE
))
12299 flag
= CALL_EXPR_ARG (exp
, 2);
12300 if (!validate_arg (flag
, INTEGER_TYPE
))
12302 size
= CALL_EXPR_ARG (exp
, 3);
12303 if (!validate_arg (size
, INTEGER_TYPE
))
12305 fmt
= CALL_EXPR_ARG (exp
, 4);
12306 if (!validate_arg (fmt
, POINTER_TYPE
))
12309 if (! host_integerp (size
, 1))
12312 if (! integer_all_onesp (size
))
12314 if (! host_integerp (len
, 1))
12316 /* If LEN is not constant, try MAXLEN too.
12317 For MAXLEN only allow optimizing into non-_ocs function
12318 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12319 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
12325 if (tree_int_cst_lt (size
, maxlen
))
12329 if (!init_target_chars ())
12332 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
12333 or if format doesn't contain % chars or is "%s". */
12334 if (! integer_zerop (flag
))
12336 fmt_str
= c_getstr (fmt
);
12337 if (fmt_str
== NULL
)
12339 if (strchr (fmt_str
, target_percent
) != NULL
12340 && strcmp (fmt_str
, target_percent_s
))
12344 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
12346 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
12347 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
12351 return rewrite_call_expr (exp
, 5, fn
, 3, dest
, len
, fmt
);
12354 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
12355 FMT and ARG are the arguments to the call; we don't fold cases with
12356 more than 2 arguments, and ARG may be null if this is a 1-argument case.
12358 Return NULL_TREE if no simplification was possible, otherwise return the
12359 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12360 code of the function to be simplified. */
12363 fold_builtin_printf (tree fndecl
, tree fmt
, tree arg
, bool ignore
,
12364 enum built_in_function fcode
)
12366 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
12367 const char *fmt_str
= NULL
;
12369 /* If the return value is used, don't do the transformation. */
12373 /* Verify the required arguments in the original call. */
12374 if (!validate_arg (fmt
, POINTER_TYPE
))
12377 /* Check whether the format is a literal string constant. */
12378 fmt_str
= c_getstr (fmt
);
12379 if (fmt_str
== NULL
)
12382 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
12384 /* If we're using an unlocked function, assume the other
12385 unlocked functions exist explicitly. */
12386 fn_putchar
= built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
];
12387 fn_puts
= built_in_decls
[BUILT_IN_PUTS_UNLOCKED
];
12391 fn_putchar
= implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
12392 fn_puts
= implicit_built_in_decls
[BUILT_IN_PUTS
];
12395 if (!init_target_chars ())
12398 if (strcmp (fmt_str
, target_percent_s
) == 0
12399 || strchr (fmt_str
, target_percent
) == NULL
)
12403 if (strcmp (fmt_str
, target_percent_s
) == 0)
12405 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12408 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12411 str
= c_getstr (arg
);
12417 /* The format specifier doesn't contain any '%' characters. */
12418 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
12424 /* If the string was "", printf does nothing. */
12425 if (str
[0] == '\0')
12426 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12428 /* If the string has length of 1, call putchar. */
12429 if (str
[1] == '\0')
12431 /* Given printf("c"), (where c is any one character,)
12432 convert "c"[0] to an int and pass that to the replacement
12434 newarg
= build_int_cst (NULL_TREE
, str
[0]);
12436 call
= build_call_expr (fn_putchar
, 1, newarg
);
12440 /* If the string was "string\n", call puts("string"). */
12441 size_t len
= strlen (str
);
12442 if ((unsigned char)str
[len
- 1] == target_newline
)
12444 /* Create a NUL-terminated string that's one char shorter
12445 than the original, stripping off the trailing '\n'. */
12446 char *newstr
= alloca (len
);
12447 memcpy (newstr
, str
, len
- 1);
12448 newstr
[len
- 1] = 0;
12450 newarg
= build_string_literal (len
, newstr
);
12452 call
= build_call_expr (fn_puts
, 1, newarg
);
12455 /* We'd like to arrange to call fputs(string,stdout) here,
12456 but we need stdout and don't have a way to get it yet. */
12461 /* The other optimizations can be done only on the non-va_list variants. */
12462 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
12465 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
12466 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
12468 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12471 call
= build_call_expr (fn_puts
, 1, arg
);
12474 /* If the format specifier was "%c", call __builtin_putchar(arg). */
12475 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12477 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12480 call
= build_call_expr (fn_putchar
, 1, arg
);
12486 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12489 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
12490 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
12491 more than 3 arguments, and ARG may be null in the 2-argument case.
12493 Return NULL_TREE if no simplification was possible, otherwise return the
12494 simplified form of the call as a tree. FCODE is the BUILT_IN_*
12495 code of the function to be simplified. */
12498 fold_builtin_fprintf (tree fndecl
, tree fp
, tree fmt
, tree arg
, bool ignore
,
12499 enum built_in_function fcode
)
12501 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
12502 const char *fmt_str
= NULL
;
12504 /* If the return value is used, don't do the transformation. */
12508 /* Verify the required arguments in the original call. */
12509 if (!validate_arg (fp
, POINTER_TYPE
))
12511 if (!validate_arg (fmt
, POINTER_TYPE
))
12514 /* Check whether the format is a literal string constant. */
12515 fmt_str
= c_getstr (fmt
);
12516 if (fmt_str
== NULL
)
12519 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
12521 /* If we're using an unlocked function, assume the other
12522 unlocked functions exist explicitly. */
12523 fn_fputc
= built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
];
12524 fn_fputs
= built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
];
12528 fn_fputc
= implicit_built_in_decls
[BUILT_IN_FPUTC
];
12529 fn_fputs
= implicit_built_in_decls
[BUILT_IN_FPUTS
];
12532 if (!init_target_chars ())
12535 /* If the format doesn't contain % args or %%, use strcpy. */
12536 if (strchr (fmt_str
, target_percent
) == NULL
)
12538 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
12542 /* If the format specifier was "", fprintf does nothing. */
12543 if (fmt_str
[0] == '\0')
12545 /* If FP has side-effects, just wait until gimplification is
12547 if (TREE_SIDE_EFFECTS (fp
))
12550 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12553 /* When "string" doesn't contain %, replace all cases of
12554 fprintf (fp, string) with fputs (string, fp). The fputs
12555 builtin will take care of special cases like length == 1. */
12557 call
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
12560 /* The other optimizations can be done only on the non-va_list variants. */
12561 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
12564 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12565 else if (strcmp (fmt_str
, target_percent_s
) == 0)
12567 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12570 call
= build_call_expr (fn_fputs
, 2, arg
, fp
);
12573 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12574 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12576 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12579 call
= build_call_expr (fn_fputc
, 2, arg
, fp
);
12584 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12587 /* Initialize format string characters in the target charset. */
12590 init_target_chars (void)
12595 target_newline
= lang_hooks
.to_target_charset ('\n');
12596 target_percent
= lang_hooks
.to_target_charset ('%');
12597 target_c
= lang_hooks
.to_target_charset ('c');
12598 target_s
= lang_hooks
.to_target_charset ('s');
12599 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
12603 target_percent_c
[0] = target_percent
;
12604 target_percent_c
[1] = target_c
;
12605 target_percent_c
[2] = '\0';
12607 target_percent_s
[0] = target_percent
;
12608 target_percent_s
[1] = target_s
;
12609 target_percent_s
[2] = '\0';
12611 target_percent_s_newline
[0] = target_percent
;
12612 target_percent_s_newline
[1] = target_s
;
12613 target_percent_s_newline
[2] = target_newline
;
12614 target_percent_s_newline
[3] = '\0';
12621 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12622 and no overflow/underflow occurred. INEXACT is true if M was not
12623 exactly calculated. TYPE is the tree type for the result. This
12624 function assumes that you cleared the MPFR flags and then
12625 calculated M to see if anything subsequently set a flag prior to
12626 entering this function. Return NULL_TREE if any checks fail. */
12629 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
12631 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12632 overflow/underflow occurred. If -frounding-math, proceed iff the
12633 result of calling FUNC was exact. */
12634 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12635 && (!flag_rounding_math
|| !inexact
))
12637 REAL_VALUE_TYPE rr
;
12639 real_from_mpfr (&rr
, m
, type
, GMP_RNDN
);
12640 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12641 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12642 but the mpft_t is not, then we underflowed in the
12644 if (real_isfinite (&rr
)
12645 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
12647 REAL_VALUE_TYPE rmode
;
12649 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
12650 /* Proceed iff the specified mode can hold the value. */
12651 if (real_identical (&rmode
, &rr
))
12652 return build_real (type
, rmode
);
12658 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12659 FUNC on it and return the resulting value as a tree with type TYPE.
12660 If MIN and/or MAX are not NULL, then the supplied ARG must be
12661 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12662 acceptable values, otherwise they are not. The mpfr precision is
12663 set to the precision of TYPE. We assume that function FUNC returns
12664 zero if the result could be calculated exactly within the requested
12668 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
12669 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
12672 tree result
= NULL_TREE
;
12676 /* To proceed, MPFR must exactly represent the target floating point
12677 format, which only happens when the target base equals two. */
12678 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12679 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
12681 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12683 if (real_isfinite (ra
)
12684 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
12685 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
12687 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12691 mpfr_init2 (m
, prec
);
12692 mpfr_from_real (m
, ra
, GMP_RNDN
);
12693 mpfr_clear_flags ();
12694 inexact
= func (m
, m
, GMP_RNDN
);
12695 result
= do_mpfr_ckconv (m
, type
, inexact
);
12703 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12704 FUNC on it and return the resulting value as a tree with type TYPE.
12705 The mpfr precision is set to the precision of TYPE. We assume that
12706 function FUNC returns zero if the result could be calculated
12707 exactly within the requested precision. */
12710 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
12711 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12713 tree result
= NULL_TREE
;
12718 /* To proceed, MPFR must exactly represent the target floating point
12719 format, which only happens when the target base equals two. */
12720 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12721 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12722 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
12724 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12725 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12727 if (real_isfinite (ra1
) && real_isfinite (ra2
))
12729 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12733 mpfr_inits2 (prec
, m1
, m2
, NULL
);
12734 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12735 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
12736 mpfr_clear_flags ();
12737 inexact
= func (m1
, m1
, m2
, GMP_RNDN
);
12738 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12739 mpfr_clears (m1
, m2
, NULL
);
12746 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
12747 FUNC on it and return the resulting value as a tree with type TYPE.
12748 The mpfr precision is set to the precision of TYPE. We assume that
12749 function FUNC returns zero if the result could be calculated
12750 exactly within the requested precision. */
12753 do_mpfr_arg3 (tree arg1
, tree arg2
, tree arg3
, tree type
,
12754 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12756 tree result
= NULL_TREE
;
12762 /* To proceed, MPFR must exactly represent the target floating point
12763 format, which only happens when the target base equals two. */
12764 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12765 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12766 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
)
12767 && TREE_CODE (arg3
) == REAL_CST
&& !TREE_OVERFLOW (arg3
))
12769 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12770 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12771 const REAL_VALUE_TYPE
*const ra3
= &TREE_REAL_CST (arg3
);
12773 if (real_isfinite (ra1
) && real_isfinite (ra2
) && real_isfinite (ra3
))
12775 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12779 mpfr_inits2 (prec
, m1
, m2
, m3
, NULL
);
12780 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12781 mpfr_from_real (m2
, ra2
, GMP_RNDN
);
12782 mpfr_from_real (m3
, ra3
, GMP_RNDN
);
12783 mpfr_clear_flags ();
12784 inexact
= func (m1
, m1
, m2
, m3
, GMP_RNDN
);
12785 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12786 mpfr_clears (m1
, m2
, m3
, NULL
);
12793 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
12794 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
12795 If ARG_SINP and ARG_COSP are NULL then the result is returned
12796 as a complex value.
12797 The type is taken from the type of ARG and is used for setting the
12798 precision of the calculation and results. */
12801 do_mpfr_sincos (tree arg
, tree arg_sinp
, tree arg_cosp
)
12803 tree
const type
= TREE_TYPE (arg
);
12804 tree result
= NULL_TREE
;
12808 /* To proceed, MPFR must exactly represent the target floating point
12809 format, which only happens when the target base equals two. */
12810 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12811 && TREE_CODE (arg
) == REAL_CST
12812 && !TREE_OVERFLOW (arg
))
12814 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12816 if (real_isfinite (ra
))
12818 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12819 tree result_s
, result_c
;
12823 mpfr_inits2 (prec
, m
, ms
, mc
, NULL
);
12824 mpfr_from_real (m
, ra
, GMP_RNDN
);
12825 mpfr_clear_flags ();
12826 inexact
= mpfr_sin_cos (ms
, mc
, m
, GMP_RNDN
);
12827 result_s
= do_mpfr_ckconv (ms
, type
, inexact
);
12828 result_c
= do_mpfr_ckconv (mc
, type
, inexact
);
12829 mpfr_clears (m
, ms
, mc
, NULL
);
12830 if (result_s
&& result_c
)
12832 /* If we are to return in a complex value do so. */
12833 if (!arg_sinp
&& !arg_cosp
)
12834 return build_complex (build_complex_type (type
),
12835 result_c
, result_s
);
12837 /* Dereference the sin/cos pointer arguments. */
12838 arg_sinp
= build_fold_indirect_ref (arg_sinp
);
12839 arg_cosp
= build_fold_indirect_ref (arg_cosp
);
12840 /* Proceed if valid pointer type were passed in. */
12841 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp
)) == TYPE_MAIN_VARIANT (type
)
12842 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp
)) == TYPE_MAIN_VARIANT (type
))
12844 /* Set the values. */
12845 result_s
= fold_build2 (MODIFY_EXPR
, type
, arg_sinp
,
12847 TREE_SIDE_EFFECTS (result_s
) = 1;
12848 result_c
= fold_build2 (MODIFY_EXPR
, type
, arg_cosp
,
12850 TREE_SIDE_EFFECTS (result_c
) = 1;
12851 /* Combine the assignments into a compound expr. */
12852 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
12853 result_s
, result_c
));
12861 #if MPFR_VERSION >= MPFR_VERSION_NUM(2,3,0)
12862 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12863 two-argument mpfr order N Bessel function FUNC on them and return
12864 the resulting value as a tree with type TYPE. The mpfr precision
12865 is set to the precision of TYPE. We assume that function FUNC
12866 returns zero if the result could be calculated exactly within the
12867 requested precision. */
12869 do_mpfr_bessel_n (tree arg1
, tree arg2
, tree type
,
12870 int (*func
)(mpfr_ptr
, long, mpfr_srcptr
, mp_rnd_t
),
12871 const REAL_VALUE_TYPE
*min
, bool inclusive
)
12873 tree result
= NULL_TREE
;
12878 /* To proceed, MPFR must exactly represent the target floating point
12879 format, which only happens when the target base equals two. */
12880 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12881 && host_integerp (arg1
, 0)
12882 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
12884 const HOST_WIDE_INT n
= tree_low_cst(arg1
, 0);
12885 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg2
);
12888 && real_isfinite (ra
)
12889 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
)))
12891 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12895 mpfr_init2 (m
, prec
);
12896 mpfr_from_real (m
, ra
, GMP_RNDN
);
12897 mpfr_clear_flags ();
12898 inexact
= func (m
, n
, m
, GMP_RNDN
);
12899 result
= do_mpfr_ckconv (m
, type
, inexact
);
12907 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12908 the pointer *(ARG_QUO) and return the result. The type is taken
12909 from the type of ARG0 and is used for setting the precision of the
12910 calculation and results. */
12913 do_mpfr_remquo (tree arg0
, tree arg1
, tree arg_quo
)
12915 tree
const type
= TREE_TYPE (arg0
);
12916 tree result
= NULL_TREE
;
12921 /* To proceed, MPFR must exactly represent the target floating point
12922 format, which only happens when the target base equals two. */
12923 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12924 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
12925 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
))
12927 const REAL_VALUE_TYPE
*const ra0
= TREE_REAL_CST_PTR (arg0
);
12928 const REAL_VALUE_TYPE
*const ra1
= TREE_REAL_CST_PTR (arg1
);
12930 if (real_isfinite (ra0
) && real_isfinite (ra1
))
12932 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12937 mpfr_inits2 (prec
, m0
, m1
, NULL
);
12938 mpfr_from_real (m0
, ra0
, GMP_RNDN
);
12939 mpfr_from_real (m1
, ra1
, GMP_RNDN
);
12940 mpfr_clear_flags ();
12941 mpfr_remquo (m0
, &integer_quo
, m0
, m1
, GMP_RNDN
);
12942 /* Remquo is independent of the rounding mode, so pass
12943 inexact=0 to do_mpfr_ckconv(). */
12944 result_rem
= do_mpfr_ckconv (m0
, type
, /*inexact=*/ 0);
12945 mpfr_clears (m0
, m1
, NULL
);
12948 /* MPFR calculates quo in the host's long so it may
12949 return more bits in quo than the target int can hold
12950 if sizeof(host long) > sizeof(target int). This can
12951 happen even for native compilers in LP64 mode. In
12952 these cases, modulo the quo value with the largest
12953 number that the target int can hold while leaving one
12954 bit for the sign. */
12955 if (sizeof (integer_quo
) * CHAR_BIT
> INT_TYPE_SIZE
)
12956 integer_quo
%= (long)(1UL << (INT_TYPE_SIZE
- 1));
12958 /* Dereference the quo pointer argument. */
12959 arg_quo
= build_fold_indirect_ref (arg_quo
);
12960 /* Proceed iff a valid pointer type was passed in. */
12961 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo
)) == integer_type_node
)
12963 /* Set the value. */
12964 tree result_quo
= fold_build2 (MODIFY_EXPR
,
12965 TREE_TYPE (arg_quo
), arg_quo
,
12966 build_int_cst (NULL
, integer_quo
));
12967 TREE_SIDE_EFFECTS (result_quo
) = 1;
12968 /* Combine the quo assignment with the rem. */
12969 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
12970 result_quo
, result_rem
));
12978 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12979 resulting value as a tree with type TYPE. The mpfr precision is
12980 set to the precision of TYPE. We assume that this mpfr function
12981 returns zero if the result could be calculated exactly within the
12982 requested precision. In addition, the integer pointer represented
12983 by ARG_SG will be dereferenced and set to the appropriate signgam
12987 do_mpfr_lgamma_r (tree arg
, tree arg_sg
, tree type
)
12989 tree result
= NULL_TREE
;
12993 /* To proceed, MPFR must exactly represent the target floating point
12994 format, which only happens when the target base equals two. Also
12995 verify ARG is a constant and that ARG_SG is an int pointer. */
12996 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12997 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
)
12998 && TREE_CODE (TREE_TYPE (arg_sg
)) == POINTER_TYPE
12999 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg
))) == integer_type_node
)
13001 const REAL_VALUE_TYPE
*const ra
= TREE_REAL_CST_PTR (arg
);
13003 /* In addition to NaN and Inf, the argument cannot be zero or a
13004 negative integer. */
13005 if (real_isfinite (ra
)
13006 && ra
->cl
!= rvc_zero
13007 && !(real_isneg(ra
) && real_isinteger(ra
, TYPE_MODE (type
))))
13009 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
13014 mpfr_init2 (m
, prec
);
13015 mpfr_from_real (m
, ra
, GMP_RNDN
);
13016 mpfr_clear_flags ();
13017 inexact
= mpfr_lgamma (m
, &sg
, m
, GMP_RNDN
);
13018 result_lg
= do_mpfr_ckconv (m
, type
, inexact
);
13024 /* Dereference the arg_sg pointer argument. */
13025 arg_sg
= build_fold_indirect_ref (arg_sg
);
13026 /* Assign the signgam value into *arg_sg. */
13027 result_sg
= fold_build2 (MODIFY_EXPR
,
13028 TREE_TYPE (arg_sg
), arg_sg
,
13029 build_int_cst (NULL
, sg
));
13030 TREE_SIDE_EFFECTS (result_sg
) = 1;
13031 /* Combine the signgam assignment with the lgamma result. */
13032 result
= non_lvalue (fold_build2 (COMPOUND_EXPR
, type
,
13033 result_sg
, result_lg
));