/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "tree-gimple.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "typeclass.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, and make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];

/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance the runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
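/* Illustrative sketch, not code from GCC itself: front ends reach these
   tables through a decl's built_in_function code, and must cope with the
   NULL_TREE entries that the implicit array may contain, e.g.  */
#if 0
  tree decl = implicit_built_in_decls[(int) BUILT_IN_MEMCPY];
  if (decl == NULL_TREE)
    ; /* The runtime need not provide it; emit an ordinary call instead.  */
#endif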
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_bcopy (tree, int);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (tree, int, tree, int, ...);
static bool validate_arg (tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree, tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_cosh (tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (tree, tree *, int, bool);
static tree fold_builtin_0 (tree, bool);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_2 (tree, tree, tree, bool);
static tree fold_builtin_3 (tree, tree, tree, tree, bool);
static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (tree, tree, bool);

static tree fold_builtin_strpbrk (tree, tree, tree);
static tree fold_builtin_strstr (tree, tree, tree);
static tree fold_builtin_strrchr (tree, tree, tree);
static tree fold_builtin_strcat (tree, tree);
static tree fold_builtin_strncat (tree, tree, tree);
static tree fold_builtin_strspn (tree, tree);
static tree fold_builtin_strcspn (tree, tree);
static tree fold_builtin_sprintf (tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
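/* Illustrative user-level example, not part of this file: invoking a
   builtin by its internal name, which called_as_built_in above detects, so
   it is considered for inline expansion regardless of the -O level.  */
#if 0
  char buf[16];
  __builtin_memcpy (buf, "hello", 6);   /* expanded even at -O0 */
  __sync_synchronize ();                /* likewise for __sync_ builtins */
#endif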
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))
    return 0;

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    switch (TREE_CODE (exp))
      {
      case NON_LVALUE_EXPR:
        exp = TREE_OPERAND (exp, 0);
        if (! POINTER_TYPE_P (TREE_TYPE (exp)))
          return align;

        inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
        align = MIN (inner, max_align);
        break;

      case PLUS_EXPR:
        /* If sum of pointer + int, restrict our maximum alignment to that
           imposed by the integer.  If not, we can't do any better than
           ALIGN.  */
        if (! host_integerp (TREE_OPERAND (exp, 1), 1))
          return align;

        while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
                & (max_align / BITS_PER_UNIT - 1)) != 0)
          max_align >>= 1;

        exp = TREE_OPERAND (exp, 0);
        break;

      case ADDR_EXPR:
        /* See what we are pointing at and look at its alignment.  */
        exp = TREE_OPERAND (exp, 0);
        inner = max_align;
        if (handled_component_p (exp))
          {
            HOST_WIDE_INT bitsize, bitpos;
            tree offset;
            enum machine_mode mode;
            int unsignedp, volatilep;

            exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                       &mode, &unsignedp, &volatilep, true);
            inner = MIN (inner, (unsigned) (bitpos & -bitpos));
            if (offset && TREE_CODE (offset) == PLUS_EXPR
                && host_integerp (TREE_OPERAND (offset, 1), 1))
              {
                /* Any overflow in calculating offset_bits won't change
                   the alignment.  */
                unsigned offset_bits
                  = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                     * BITS_PER_UNIT);

                inner = MIN (inner, (offset_bits & -offset_bits));
                offset = TREE_OPERAND (offset, 0);
              }
            if (offset && TREE_CODE (offset) == MULT_EXPR
                && host_integerp (TREE_OPERAND (offset, 1), 1))
              {
                /* Any overflow in calculating offset_factor won't change
                   the alignment.  */
                unsigned offset_factor
                  = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
                     * BITS_PER_UNIT);

                inner = MIN (inner, (offset_factor & -offset_factor));
              }
            else if (offset)
              inner = MIN (inner, BITS_PER_UNIT);
          }
        if (TREE_CODE (exp) == FUNCTION_DECL)
          align = FUNCTION_BOUNDARY;
        else if (DECL_P (exp))
          align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
        else if (CONSTANT_CLASS_P (exp))
          align = MIN (inner, (unsigned) CONSTANT_ALIGNMENT (exp, align));
#endif
        else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
                 || TREE_CODE (exp) == INDIRECT_REF)
          align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
        else
          align = MIN (align, inner);
        return MIN (align, max_align);

      default:
        return align;
      }
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning (0, "offset outside bounds of constant string");
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
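/* Illustrative user-level example, not part of GCC itself: c_strlen is what
   lets source-level strlen calls like these fold at compile time.  */
#if 0
  size_t a = strlen ("foo");               /* folds to 3 */
  size_t b = strlen ("foo\0bar" + 4);      /* known offset 4, folds to 3 */
  size_t c = strlen (i++ ? "foo" : "bar"); /* may not fold when expanding the
                                              length would drop the i++ */
#endif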
/* Return a char pointer for a C string if it is a string constant
   or a sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
        ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
          || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      current_function_accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static HOST_WIDE_INT setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  current_function_calls_setjmp = 1;

  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
         apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
    }

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (virtual_incoming_args_rtx,
                          copy_to_reg (get_arg_pointer_save_area (cfun)));
        }
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
        {
          lab = copy_to_reg (lab);

          emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                      gen_rtx_MEM (BLKmode,
                                                   gen_rtx_SCRATCH (VOIDmode))));
          emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                      gen_rtx_MEM (BLKmode,
                                                   hard_frame_pointer_rtx)));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
                                              REG_NOTES (insn));
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}
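/* Illustrative user-level sketch, not part of GCC, of the __builtin_setjmp /
   __builtin_longjmp pair expanded above.  The buffer is an array of five
   words, and the second longjmp argument must be 1, as the gcc_assert above
   enforces.  These builtins are meant for internal exception-handling use;
   handle_error is a hypothetical function.  */
#if 0
  void *buf[5];
  if (__builtin_setjmp (buf) == 0)
    {
      /* Normal path; somewhere later it may execute:  */
      __builtin_longjmp (buf, 1);   /* control reappears at the setjmp */
    }
  else
    handle_error ();
#endif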
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  current_function_has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
         This sets the actual hard register used for the frame pointer
         to the location of the function's incoming static chain info.
         The non-local goto handler will then adjust it to contain the
         proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                              const0_rtx, REG_NOTES (insn));
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
           (op0,
            insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
          || (GET_MODE (op0) != Pmode))
        {
          op0 = convert_memory_address (Pmode, op0);
          op0 = force_reg (Pmode, op0);
        }
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
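/* Illustrative user-level example, not part of GCC: the argument rules
   enforced above.  The second and third arguments must be integer constants,
   with read/write in {0, 1} and locality in {0, 1, 2, 3}.  */
#if 0
  extern char buf[4096];
  int i;
  for (i = 0; i < 4096; i += 64)
    __builtin_prefetch (&buf[i + 256], 0, 3);  /* read, high temporal locality */
#endif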
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed, or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
          || TREE_CODE (exp) == NON_LVALUE_EXPR)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);

      /* Allow the string and memory builtins to overflow from one
         field into another, see http://gcc.gnu.org/PR23561.
         Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
         memory accessed by the string or memory builtin will fit
         within the field.  */
      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
        {
          tree mem_expr = MEM_EXPR (mem);
          HOST_WIDE_INT offset = -1, length = -1;
          tree inner = exp;

          while (TREE_CODE (inner) == ARRAY_REF
                 || TREE_CODE (inner) == NOP_EXPR
                 || TREE_CODE (inner) == CONVERT_EXPR
                 || TREE_CODE (inner) == NON_LVALUE_EXPR
                 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
                 || TREE_CODE (inner) == SAVE_EXPR)
            inner = TREE_OPERAND (inner, 0);

          gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

          if (MEM_OFFSET (mem)
              && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
            offset = INTVAL (MEM_OFFSET (mem));

          if (offset >= 0 && len && host_integerp (len, 0))
            length = tree_low_cst (len, 0);

          while (TREE_CODE (inner) == COMPONENT_REF)
            {
              tree field = TREE_OPERAND (inner, 1);
              gcc_assert (! DECL_BIT_FIELD (field));
              gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
              gcc_assert (field == TREE_OPERAND (mem_expr, 1));

              if (length >= 0
                  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
                  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
                {
                  HOST_WIDE_INT size
                    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
                  /* If we can prove the memory starting at XEXP (mem, 0)
                     and ending at XEXP (mem, 0) + LENGTH will fit into
                     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
                  if (offset >= 0
                      && offset + length <= size)
                    break;
                }

              if (offset >= 0
                  && host_integerp (DECL_FIELD_OFFSET (field), 0))
                offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
                          + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                            / BITS_PER_UNIT;
              else
                offset = -1;

              mem_expr = TREE_OPERAND (mem_expr, 0);
              inner = TREE_OPERAND (inner, 0);
            }

          if (mem_expr == NULL)
            offset = -1;
          if (mem_expr != MEM_EXPR (mem))
            {
              set_mem_expr (mem, mem_expr);
              set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
            }
        }
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1160 /* For each register that may be used for calling a function, this
1161 gives a mode used to copy the register's value. VOIDmode indicates
1162 the register is not used for calling a function. If the machine
1163 has register windows, this gives only the outbound registers.
1164 INCOMING_REGNO gives the corresponding inbound register. */
1165 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
1167 /* For each register that may be used for returning values, this gives
1168 a mode used to copy the register's value. VOIDmode indicates the
1169 register is not used for returning values. If the machine has
1170 register windows, this gives only the outbound registers.
1171 INCOMING_REGNO gives the corresponding inbound register. */
1172 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
1174 /* For each register that may be used for calling a function, this
1175 gives the offset of that register into the block returned by
1176 __builtin_apply_args. 0 indicates that the register is not
1177 used for calling a function. */
1178 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  tem
    = force_operand (plus_constant (tem, current_function_pretend_args_size),
                     NULL_RTX);
#endif
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));
  else
    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
#endif
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1555 expand_builtin_return (rtx result
)
1557 int size
, align
, regno
;
1558 enum machine_mode mode
;
1560 rtx call_fusage
= 0;
1562 result
= convert_memory_address (Pmode
, result
);
1564 apply_result_size ();
1565 result
= gen_rtx_MEM (BLKmode
, result
);
1567 #ifdef HAVE_untyped_return
1568 if (HAVE_untyped_return
)
1570 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1576 /* Restore the return value and note that each value is used. */
1578 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1579 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1581 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1582 if (size
% align
!= 0)
1583 size
= CEIL (size
, align
) * align
;
1584 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1585 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1587 push_to_sequence (call_fusage
);
1588 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
1589 call_fusage
= get_insns ();
1591 size
+= GET_MODE_SIZE (mode
);
1594 /* Put the USE insns before the return. */
1595 emit_insn (call_fusage
);
1597 /* Return whatever values was restored by jumping directly to the end
1599 expand_naked_return ();
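/* Illustrative user-level sketch, not part of GCC, of the untyped call
   machinery implemented by the three functions above; target_fn and the
   argument-block size of 64 are hypothetical.  */
#if 0
  void *args = __builtin_apply_args ();                   /* save incoming regs */
  void *res  = __builtin_apply ((void (*) ()) target_fn, args, 64);
  __builtin_return (res);                                 /* untyped return */
#endif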
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}
/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
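/* Illustrative user-level example, not part of GCC: __builtin_classify_type
   evaluates to one of the type_class values computed by type_to_class
   above.  */
#if 0
  int i;  double d;  int *p;
  __builtin_classify_type (i);   /* integer_type_class */
  __builtin_classify_type (d);   /* real_type_class */
  __builtin_classify_type (p);   /* pointer_type_class */
#endif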
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
1652 mathfn_built_in (tree type
, enum built_in_function fn
)
1654 enum built_in_function fcode
, fcodef
, fcodel
;
1658 CASE_MATHFN (BUILT_IN_ACOS
)
1659 CASE_MATHFN (BUILT_IN_ACOSH
)
1660 CASE_MATHFN (BUILT_IN_ASIN
)
1661 CASE_MATHFN (BUILT_IN_ASINH
)
1662 CASE_MATHFN (BUILT_IN_ATAN
)
1663 CASE_MATHFN (BUILT_IN_ATAN2
)
1664 CASE_MATHFN (BUILT_IN_ATANH
)
1665 CASE_MATHFN (BUILT_IN_CBRT
)
1666 CASE_MATHFN (BUILT_IN_CEIL
)
1667 CASE_MATHFN (BUILT_IN_CEXPI
)
1668 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1669 CASE_MATHFN (BUILT_IN_COS
)
1670 CASE_MATHFN (BUILT_IN_COSH
)
1671 CASE_MATHFN (BUILT_IN_DREM
)
1672 CASE_MATHFN (BUILT_IN_ERF
)
1673 CASE_MATHFN (BUILT_IN_ERFC
)
1674 CASE_MATHFN (BUILT_IN_EXP
)
1675 CASE_MATHFN (BUILT_IN_EXP10
)
1676 CASE_MATHFN (BUILT_IN_EXP2
)
1677 CASE_MATHFN (BUILT_IN_EXPM1
)
1678 CASE_MATHFN (BUILT_IN_FABS
)
1679 CASE_MATHFN (BUILT_IN_FDIM
)
1680 CASE_MATHFN (BUILT_IN_FLOOR
)
1681 CASE_MATHFN (BUILT_IN_FMA
)
1682 CASE_MATHFN (BUILT_IN_FMAX
)
1683 CASE_MATHFN (BUILT_IN_FMIN
)
1684 CASE_MATHFN (BUILT_IN_FMOD
)
1685 CASE_MATHFN (BUILT_IN_FREXP
)
1686 CASE_MATHFN (BUILT_IN_GAMMA
)
1687 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1688 CASE_MATHFN (BUILT_IN_HYPOT
)
1689 CASE_MATHFN (BUILT_IN_ILOGB
)
1690 CASE_MATHFN (BUILT_IN_INF
)
1691 CASE_MATHFN (BUILT_IN_ISINF
)
1692 CASE_MATHFN (BUILT_IN_J0
)
1693 CASE_MATHFN (BUILT_IN_J1
)
1694 CASE_MATHFN (BUILT_IN_JN
)
1695 CASE_MATHFN (BUILT_IN_LCEIL
)
1696 CASE_MATHFN (BUILT_IN_LDEXP
)
1697 CASE_MATHFN (BUILT_IN_LFLOOR
)
1698 CASE_MATHFN (BUILT_IN_LGAMMA
)
1699 CASE_MATHFN (BUILT_IN_LLCEIL
)
1700 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1701 CASE_MATHFN (BUILT_IN_LLRINT
)
1702 CASE_MATHFN (BUILT_IN_LLROUND
)
1703 CASE_MATHFN (BUILT_IN_LOG
)
1704 CASE_MATHFN (BUILT_IN_LOG10
)
1705 CASE_MATHFN (BUILT_IN_LOG1P
)
1706 CASE_MATHFN (BUILT_IN_LOG2
)
1707 CASE_MATHFN (BUILT_IN_LOGB
)
1708 CASE_MATHFN (BUILT_IN_LRINT
)
1709 CASE_MATHFN (BUILT_IN_LROUND
)
1710 CASE_MATHFN (BUILT_IN_MODF
)
1711 CASE_MATHFN (BUILT_IN_NAN
)
1712 CASE_MATHFN (BUILT_IN_NANS
)
1713 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1714 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1715 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1716 CASE_MATHFN (BUILT_IN_POW
)
1717 CASE_MATHFN (BUILT_IN_POWI
)
1718 CASE_MATHFN (BUILT_IN_POW10
)
1719 CASE_MATHFN (BUILT_IN_REMAINDER
)
1720 CASE_MATHFN (BUILT_IN_REMQUO
)
1721 CASE_MATHFN (BUILT_IN_RINT
)
1722 CASE_MATHFN (BUILT_IN_ROUND
)
1723 CASE_MATHFN (BUILT_IN_SCALB
)
1724 CASE_MATHFN (BUILT_IN_SCALBLN
)
1725 CASE_MATHFN (BUILT_IN_SCALBN
)
1726 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1727 CASE_MATHFN (BUILT_IN_SIN
)
1728 CASE_MATHFN (BUILT_IN_SINCOS
)
1729 CASE_MATHFN (BUILT_IN_SINH
)
1730 CASE_MATHFN (BUILT_IN_SQRT
)
1731 CASE_MATHFN (BUILT_IN_TAN
)
1732 CASE_MATHFN (BUILT_IN_TANH
)
1733 CASE_MATHFN (BUILT_IN_TGAMMA
)
1734 CASE_MATHFN (BUILT_IN_TRUNC
)
1735 CASE_MATHFN (BUILT_IN_Y0
)
1736 CASE_MATHFN (BUILT_IN_Y1
)
1737 CASE_MATHFN (BUILT_IN_YN
)
      default:
        return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return implicit_built_in_decls[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return implicit_built_in_decls[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return implicit_built_in_decls[fcodel];
  else
    return NULL_TREE;
}
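/* Usage sketch (annotation, not original source): callers hand in the tree
   type they need a variant for.  For example, the lfloor/lceil expander
   further below falls back via

     fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

   and receives the floor/floorf/floorl (or ceil*) decl, or NULL_TREE when
   the runtime is not assumed to provide an implicit declaration.  */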
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
                           0, lab);

  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
        = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  expand_call (exp, target, 0);
  emit_label (lab);
}
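/* Illustrative sketch (annotation, not original source): the EQ comparison
   of TARGET with itself above exploits the fact that a NaN is the only value
   that compares unequal to itself.  At the source level the generated code
   behaves roughly like the helper below; the errno stand-in and the value 33
   are placeholders for the target-specific definitions.  */

static int sketch_errno;        /* stand-in for the target's errno location */

static double
sketch_checked_log (double x)
{
  double result = __builtin_log (x);
  if (result != result)         /* true only when RESULT is a NaN */
    sketch_errno = 33;          /* EDOM on many targets; purely illustrative */
  return result;
}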
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns, before_call;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
        break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    default:
      gcc_unreachable ();
    }
  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
        {
          arg = narg;
          exp = build_call_expr (fndecl, 1, arg);
        }

      op0 = expand_expr (arg, subtarget, VOIDmode, 0);

      start_sequence ();

      /* Compute into TARGET.
         Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
        {
          if (errno_set)
            expand_errno_check (exp, target);

          /* Output the entire sequence.  */
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
          return target;
        }

      /* If we were unable to expand via the builtin, stop the sequence
         (without outputting the insns) and call to the library function
         with the stabilized argument list.  */
      end_sequence ();
    }

  before_call = get_last_insn ();

  target = expand_call (exp, target, target == const0_rtx);

  /* If this is a sqrt operation and we don't care about errno, try to
     attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
     This allows the semantics of the libcall to be visible to the RTL
     optimizers.  */
  if (builtin_optab == sqrt_optab && !errno_set)
    {
      /* Search backwards through the insns emitted by expand_call looking
         for the instruction with the REG_RETVAL note.  */
      rtx last = get_last_insn ();
      while (last != before_call)
        {
          if (find_reg_note (last, REG_RETVAL, NULL))
            {
              rtx note = find_reg_note (last, REG_EQUAL, NULL);
              /* Check that the REG_EQUAL note is an EXPR_LIST with
                 two elements, i.e. symbol_ref(sqrt) and the operand.  */
              if (note
                  && GET_CODE (note) == EXPR_LIST
                  && GET_CODE (XEXP (note, 0)) == EXPR_LIST
                  && XEXP (XEXP (note, 0), 1) != NULL_RTX
                  && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
                {
                  rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
                  /* Check operand is a register with expected mode.  */
                  if (operand
                      && REG_P (operand)
                      && GET_MODE (operand) == mode)
                    {
                      /* Replace the REG_EQUAL note with a SQRT rtx.  */
                      rtx equiv = gen_rtx_SQRT (mode, operand);
                      set_unique_reg_note (last, REG_EQUAL, equiv);
                    }
                }
              break;
            }
          last = PREV_INSN (last);
        }
    }

  return target;
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, narg;
  enum machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
1978 CASE_FLT_FN (BUILT_IN_SCALBN
):
1979 CASE_FLT_FN (BUILT_IN_SCALBLN
):
1980 CASE_FLT_FN (BUILT_IN_LDEXP
):
1981 op1_type
= INTEGER_TYPE
;
1986 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
1989 arg0
= CALL_EXPR_ARG (exp
, 0);
1990 arg1
= CALL_EXPR_ARG (exp
, 1);
1992 switch (DECL_FUNCTION_CODE (fndecl
))
1994 CASE_FLT_FN (BUILT_IN_POW
):
1995 builtin_optab
= pow_optab
; break;
1996 CASE_FLT_FN (BUILT_IN_ATAN2
):
1997 builtin_optab
= atan2_optab
; break;
1998 CASE_FLT_FN (BUILT_IN_SCALB
):
1999 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2001 builtin_optab
= scalb_optab
; break;
2002 CASE_FLT_FN (BUILT_IN_SCALBN
):
2003 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2004 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2006 /* Fall through... */
2007 CASE_FLT_FN (BUILT_IN_LDEXP
):
2008 builtin_optab
= ldexp_optab
; break;
2009 CASE_FLT_FN (BUILT_IN_FMOD
):
2010 builtin_optab
= fmod_optab
; break;
2011 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2012 CASE_FLT_FN (BUILT_IN_DREM
):
2013 builtin_optab
= remainder_optab
; break;
2018 /* Make a suitable register to place result in. */
2019 mode
= TYPE_MODE (TREE_TYPE (exp
));
2021 /* Before working hard, check whether the instruction is available. */
2022 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2025 target
= gen_reg_rtx (mode
);
2027 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2030 /* Always stabilize the argument list. */
2031 narg
= builtin_save_expr (arg1
);
2037 narg
= builtin_save_expr (arg0
);
2045 exp
= build_call_expr (fndecl
, 2, arg0
, arg1
);
2047 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2048 op1
= expand_normal (arg1
);
2052 /* Compute into TARGET.
2053 Set TARGET to wherever the result comes back. */
2054 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2055 target
, 0, OPTAB_DIRECT
);
2057 /* If we were unable to expand via the builtin, stop the sequence
2058 (without outputting the insns) and call to the library function
2059 with the stabilized argument list. */
2063 return expand_call (exp
, target
, target
== const0_rtx
);
2067 expand_errno_check (exp
, target
);
2069 /* Output the entire sequence. */
2070 insns
= get_insns ();
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
2087 optab builtin_optab
;
2089 tree fndecl
= get_callee_fndecl (exp
);
2090 enum machine_mode mode
;
2093 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2096 arg
= CALL_EXPR_ARG (exp
, 0);
2098 switch (DECL_FUNCTION_CODE (fndecl
))
2100 CASE_FLT_FN (BUILT_IN_SIN
):
2101 CASE_FLT_FN (BUILT_IN_COS
):
2102 builtin_optab
= sincos_optab
; break;
2107 /* Make a suitable register to place result in. */
2108 mode
= TYPE_MODE (TREE_TYPE (exp
));
2110 /* Check if sincos insn is available, otherwise fallback
2111 to sin or cos insn. */
2112 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2113 switch (DECL_FUNCTION_CODE (fndecl
))
2115 CASE_FLT_FN (BUILT_IN_SIN
):
2116 builtin_optab
= sin_optab
; break;
2117 CASE_FLT_FN (BUILT_IN_COS
):
2118 builtin_optab
= cos_optab
; break;
2123 /* Before working hard, check whether the instruction is available. */
2124 if (builtin_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2126 target
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2131 narg
= save_expr (arg
);
2135 exp
= build_call_expr (fndecl
, 1, arg
);
2138 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2142 /* Compute into TARGET.
2143 Set TARGET to wherever the result comes back. */
2144 if (builtin_optab
== sincos_optab
)
2148 switch (DECL_FUNCTION_CODE (fndecl
))
2150 CASE_FLT_FN (BUILT_IN_SIN
):
2151 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2153 CASE_FLT_FN (BUILT_IN_COS
):
2154 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2159 gcc_assert (result
);
2163 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2168 /* Output the entire sequence. */
2169 insns
= get_insns ();
2175 /* If we were unable to expand via the builtin, stop the sequence
2176 (without outputting the insns) and call to the library function
2177 with the stabilized argument list. */
2181 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* Expand a call to one of the builtin math functions that operate on
   a floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target, rtx subtarget)
{
2197 optab builtin_optab
;
2198 enum insn_code icode
;
2200 tree fndecl
= get_callee_fndecl (exp
);
2201 enum machine_mode mode
;
2202 bool errno_set
= false;
2205 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2208 arg
= CALL_EXPR_ARG (exp
, 0);
2210 switch (DECL_FUNCTION_CODE (fndecl
))
2212 CASE_FLT_FN (BUILT_IN_ILOGB
):
2213 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2214 CASE_FLT_FN (BUILT_IN_ISINF
):
2215 builtin_optab
= isinf_optab
; break;
2220 /* There's no easy way to detect the case we need to set EDOM. */
2221 if (flag_errno_math
&& errno_set
)
2224 /* Optab mode depends on the mode of the input argument. */
2225 mode
= TYPE_MODE (TREE_TYPE (arg
));
2227 icode
= builtin_optab
->handlers
[(int) mode
].insn_code
;
2229 /* Before working hard, check whether the instruction is available. */
2230 if (icode
!= CODE_FOR_nothing
)
2232 /* Make a suitable register to place result in. */
2234 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2235 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2237 gcc_assert (insn_data
[icode
].operand
[0].predicate
2238 (target
, GET_MODE (target
)));
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
         need to expand the argument again.  This way, we will not perform
         side-effects more than once.  */
2243 narg
= builtin_save_expr (arg
);
2247 exp
= build_call_expr (fndecl
, 1, arg
);
2250 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2252 if (mode
!= GET_MODE (op0
))
2253 op0
= convert_to_mode (mode
, op0
, 0);
2255 /* Compute into TARGET.
2256 Set TARGET to wherever the result comes back. */
2257 emit_unop_insn (icode
, target
, op0
, UNKNOWN
);
2261 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
2274 rtx op0
, op1
, op2
, target1
, target2
;
2275 enum machine_mode mode
;
2276 tree arg
, sinp
, cosp
;
2279 if (!validate_arglist (exp
, REAL_TYPE
,
2280 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2283 arg
= CALL_EXPR_ARG (exp
, 0);
2284 sinp
= CALL_EXPR_ARG (exp
, 1);
2285 cosp
= CALL_EXPR_ARG (exp
, 2);
2287 /* Make a suitable register to place result in. */
2288 mode
= TYPE_MODE (TREE_TYPE (arg
));
2290 /* Check if sincos insn is available, otherwise emit the call. */
2291 if (sincos_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2294 target1
= gen_reg_rtx (mode
);
2295 target2
= gen_reg_rtx (mode
);
2297 op0
= expand_normal (arg
);
2298 op1
= expand_normal (build_fold_indirect_ref (sinp
));
2299 op2
= expand_normal (build_fold_indirect_ref (cosp
));
2301 /* Compute into target1 and target2.
2302 Set TARGET to wherever the result comes back. */
2303 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2304 gcc_assert (result
);
2306 /* Move target1 and target2 to the memory locations indicated
2308 emit_move_insn (op1
, target1
);
2309 emit_move_insn (op2
, target2
);
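/* Illustrative sketch (annotation, not original source): the expansion above
   turns a call like sincos (x, &s, &c) into a single two-output operation
   when the target provides one.  The helper below documents the source-level
   contract being implemented: one argument evaluation, two results written
   through the pointers.  It is only meant to describe the semantics.  */

extern double sin (double);
extern double cos (double);

static void
sketch_sincos (double x, double *sinp, double *cosp)
{
  /* Both results come from the single input X.  */
  *sinp = sin (x);
  *cosp = cos (x);
}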
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  SUBTARGET may be used as the target
   for computing one of EXP's operands.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target, rtx subtarget)
{
2322 tree fndecl
= get_callee_fndecl (exp
);
2324 enum machine_mode mode
;
2327 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2330 arg
= CALL_EXPR_ARG (exp
, 0);
2331 type
= TREE_TYPE (arg
);
2332 mode
= TYPE_MODE (TREE_TYPE (arg
));
2334 /* Try expanding via a sincos optab, fall back to emitting a libcall
2335 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2336 is only generated from sincos, cexp or if we have either of them. */
2337 if (sincos_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2339 op1
= gen_reg_rtx (mode
);
2340 op2
= gen_reg_rtx (mode
);
2342 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2344 /* Compute into op1 and op2. */
2345 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2347 else if (TARGET_HAS_SINCOS
)
2349 tree call
, fn
= NULL_TREE
;
2353 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2354 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2355 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2356 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2357 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2358 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
2362 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2363 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2364 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2365 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2366 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2367 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2369 /* Make sure not to fold the sincos call again. */
2370 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2371 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2372 call
, 3, arg
, top1
, top2
));
2376 tree call
, fn
= NULL_TREE
, narg
;
2377 tree ctype
= build_complex_type (type
);
2379 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2380 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2381 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2382 fn
= built_in_decls
[BUILT_IN_CEXP
];
2383 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2384 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2388 /* If we don't have a decl for cexp create one. This is the
2389 friendliest fallback if the user calls __builtin_cexpi
2390 without full target C99 function support. */
2391 if (fn
== NULL_TREE
)
2394 const char *name
= NULL
;
2396 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2398 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2400 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2403 fntype
= build_function_type_list (ctype
, ctype
, NULL_TREE
);
2404 fn
= build_fn_decl (name
, fntype
);
2407 narg
= fold_build2 (COMPLEX_EXPR
, ctype
,
2408 build_real (type
, dconst0
), arg
);
2410 /* Make sure not to fold the cexp call again. */
2411 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2412 return expand_expr (build_call_nary (ctype
, call
, 1, narg
),
2413 target
, VOIDmode
, 0);
2416 /* Now build the proper return type. */
2417 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2418 make_tree (TREE_TYPE (arg
), op2
),
2419 make_tree (TREE_TYPE (arg
), op1
)),
2420 target
, VOIDmode
, 0);
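/* Illustrative sketch (annotation, not original source): __builtin_cexpi (x)
   computes cos (x) + i * sin (x), i.e. cexp (i*x) without materializing the
   purely imaginary argument.  The fallback paths above obtain the same value
   through sincos or cexp; a source-level equivalent is sketched below using
   the GNU __real__/__imag__ extensions.  */

extern double sin (double);
extern double cos (double);

static _Complex double
sketch_cexpi (double x)
{
  _Complex double r;
  __real__ r = cos (x);   /* real part, as in the COMPLEX_EXPR built above */
  __imag__ r = sin (x);   /* imaginary part */
  return r;
}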
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  SUBTARGET may
   be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
{
2434 convert_optab builtin_optab
;
2435 rtx op0
, insns
, tmp
;
2436 tree fndecl
= get_callee_fndecl (exp
);
2437 enum built_in_function fallback_fn
;
2438 tree fallback_fndecl
;
2439 enum machine_mode mode
;
2442 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2445 arg
= CALL_EXPR_ARG (exp
, 0);
2447 switch (DECL_FUNCTION_CODE (fndecl
))
2449 CASE_FLT_FN (BUILT_IN_LCEIL
):
2450 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2451 builtin_optab
= lceil_optab
;
2452 fallback_fn
= BUILT_IN_CEIL
;
2455 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2456 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2457 builtin_optab
= lfloor_optab
;
2458 fallback_fn
= BUILT_IN_FLOOR
;
2465 /* Make a suitable register to place result in. */
2466 mode
= TYPE_MODE (TREE_TYPE (exp
));
2468 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2473 narg
= builtin_save_expr (arg
);
2477 exp
= build_call_expr (fndecl
, 1, arg
);
2480 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2484 /* Compute into TARGET. */
2485 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2487 /* Output the entire sequence. */
2488 insns
= get_insns ();
2494 /* If we were unable to expand via the builtin, stop the sequence
2495 (without outputting the insns). */
2498 /* Fall back to floating point rounding optab. */
2499 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2501 /* For non-C99 targets we may end up without a fallback fndecl here
2502 if the user called __builtin_lfloor directly. In this case emit
2503 a call to the floor/ceil variants nevertheless. This should result
2504 in the best user experience for not full C99 targets. */
2505 if (fallback_fndecl
== NULL_TREE
)
2508 const char *name
= NULL
;
2510 switch (DECL_FUNCTION_CODE (fndecl
))
2512 case BUILT_IN_LCEIL
:
2513 case BUILT_IN_LLCEIL
:
2516 case BUILT_IN_LCEILF
:
2517 case BUILT_IN_LLCEILF
:
2520 case BUILT_IN_LCEILL
:
2521 case BUILT_IN_LLCEILL
:
2524 case BUILT_IN_LFLOOR
:
2525 case BUILT_IN_LLFLOOR
:
2528 case BUILT_IN_LFLOORF
:
2529 case BUILT_IN_LLFLOORF
:
2532 case BUILT_IN_LFLOORL
:
2533 case BUILT_IN_LLFLOORL
:
2540 fntype
= build_function_type_list (TREE_TYPE (arg
),
2541 TREE_TYPE (arg
), NULL_TREE
);
2542 fallback_fndecl
= build_fn_decl (name
, fntype
);
2545 exp
= build_call_expr (fallback_fndecl
, 1, arg
);
2547 tmp
= expand_normal (exp
);
2549 /* Truncate the result of floating point optab to integer
2550 via expand_fix (). */
2551 target
= gen_reg_rtx (mode
);
2552 expand_fix (target
, tmp
, 0);
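/* Illustrative sketch (annotation, not original source): when no lfloor/lceil
   pattern exists, the fallback above lowers the call to the floating-point
   rounding function followed by a plain conversion, roughly as below.  */

extern double floor (double);

static long
sketch_lfloor (double x)
{
  /* (long) floor (x): round down in FP, then truncate to integer,
     mirroring the expand_fix () step in the fallback path.  */
  return (long) floor (x);
}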
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint, lround, etc.).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target, rtx subtarget)
{
2567 convert_optab builtin_optab
;
2569 tree fndecl
= get_callee_fndecl (exp
);
2571 enum machine_mode mode
;
2573 /* There's no easy way to detect the case we need to set EDOM. */
2574 if (flag_errno_math
)
2577 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2580 arg
= CALL_EXPR_ARG (exp
, 0);
2582 switch (DECL_FUNCTION_CODE (fndecl
))
2584 CASE_FLT_FN (BUILT_IN_LRINT
):
2585 CASE_FLT_FN (BUILT_IN_LLRINT
):
2586 builtin_optab
= lrint_optab
; break;
2587 CASE_FLT_FN (BUILT_IN_LROUND
):
2588 CASE_FLT_FN (BUILT_IN_LLROUND
):
2589 builtin_optab
= lround_optab
; break;
2594 /* Make a suitable register to place result in. */
2595 mode
= TYPE_MODE (TREE_TYPE (exp
));
2597 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2602 narg
= builtin_save_expr (arg
);
2606 exp
= build_call_expr (fndecl
, 1, arg
);
2609 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2613 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2615 /* Output the entire sequence. */
2616 insns
= get_insns ();
2622 /* If we were unable to expand via the builtin, stop the sequence
2623 (without outputting the insns) and call to the library function
2624 with the stabilized argument list. */
2627 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits, of the window used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3
/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */

static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
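/* Worked example (annotation, not original source): powi_table[7] is 4,
   so x**7 is evaluated as x**4 * x**3.  Recursing, x**4 = x**2 * x**2 and
   x**3 = x**2 * x (powi_table[4] == powi_table[3] == 2), so the whole
   computation needs only four multiplications, with x**2 reused from the
   register cache.  */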
/* Return the number of multiplications required to calculate
   powi(x,n) where n is less than POWI_TABLE_SIZE.  This is a
   subroutine of powi_cost.  CACHE is an array indicating
   which exponents have already been calculated.  */

static int
powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
{
  /* If we've already calculated this exponent, then this evaluation
     doesn't require any additional multiplications.  */
  if (cache[n])
    return 0;

  cache[n] = true;
  return powi_lookup_cost (n - powi_table[n], cache)
         + powi_lookup_cost (powi_table[n], cache) + 1;
}
/* Return the number of multiplications required to calculate
   powi(x,n) for an arbitrary x, given the exponent N.  This
   function needs to be kept in sync with expand_powi below.  */

static int
powi_cost (HOST_WIDE_INT n)
{
  bool cache[POWI_TABLE_SIZE];
  unsigned HOST_WIDE_INT digit;
  unsigned HOST_WIDE_INT val;
  int result;

  if (n == 0)
    return 0;

  /* Ignore the reciprocal when calculating the cost.  */
  val = (n < 0) ? -n : n;

  /* Initialize the exponent cache.  */
  memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
  cache[1] = true;

  result = 0;

  while (val >= POWI_TABLE_SIZE)
    {
      if (val & 1)
        {
          digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
          result += powi_lookup_cost (digit, cache)
                    + POWI_WINDOW_SIZE + 1;
          val >>= POWI_WINDOW_SIZE;
        }
      else
        {
          val >>= 1;
          result++;
        }
    }

  return result + powi_lookup_cost (val, cache);
}
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      if (cache[n])
        return cache[n];

      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
/* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
   floating point operand in mode MODE, and N is the exponent.  This
   function needs to be kept in sync with powi_cost above.  */

static rtx
expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
{
  unsigned HOST_WIDE_INT val;
  rtx cache[POWI_TABLE_SIZE];
  rtx result;

  if (n == 0)
    return CONST1_RTX (mode);

  val = (n < 0) ? -n : n;

  memset (cache, 0, sizeof (cache));
  cache[1] = x;

  result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);

  /* If the original exponent was negative, reciprocate the result.  */
  if (n < 0)
    result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
                           result, NULL_RTX, 0, OPTAB_LIB_WIDEN);

  return result;
}
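/* Illustrative sketch (annotation, not original source): for comparison, a
   plain square-and-multiply version of powi is shown below.  expand_powi
   handles negative exponents with the same reciprocal fix-up, but instead of
   this simple loop it shares intermediate powers through the table-driven
   recursion in expand_powi_1, which is what keeps the multiplication count
   at or below POWI_MAX_MULTS.  */

static double
sketch_powi (double x, long n)
{
  unsigned long val = (n < 0) ? -n : n;  /* magnitude of the exponent */
  double result = 1.0;
  double base = x;

  while (val != 0)
    {
      if (val & 1)
        result *= base;  /* fold in the current bit of the exponent */
      base *= base;      /* square for the next bit position */
      val >>= 1;
    }

  return (n < 0) ? 1.0 / result : result;
}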
/* Expand a call to the pow built-in mathematical function.  Return NULL_RTX
   if a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_pow (tree exp, rtx target, rtx subtarget)
{
2847 tree type
= TREE_TYPE (exp
);
2848 REAL_VALUE_TYPE cint
, c
, c2
;
2851 enum machine_mode mode
= TYPE_MODE (type
);
2853 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2856 arg0
= CALL_EXPR_ARG (exp
, 0);
2857 arg1
= CALL_EXPR_ARG (exp
, 1);
2859 if (TREE_CODE (arg1
) != REAL_CST
2860 || TREE_OVERFLOW (arg1
))
2861 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2863 /* Handle constant exponents. */
2865 /* For integer valued exponents we can expand to an optimal multiplication
2866 sequence using expand_powi. */
2867 c
= TREE_REAL_CST (arg1
);
2868 n
= real_to_integer (&c
);
2869 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2870 if (real_identical (&c
, &cint
)
2871 && ((n
>= -1 && n
<= 2)
2872 || (flag_unsafe_math_optimizations
2874 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2876 op
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
2879 op
= force_reg (mode
, op
);
2880 op
= expand_powi (op
, mode
, n
);
2885 narg0
= builtin_save_expr (arg0
);
2887 /* If the exponent is not integer valued, check if it is half of an integer.
2888 In this case we can expand to sqrt (x) * x**(n/2). */
2889 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
2890 if (fn
!= NULL_TREE
)
2892 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
2893 n
= real_to_integer (&c2
);
2894 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2895 if (real_identical (&c2
, &cint
)
2896 && ((flag_unsafe_math_optimizations
2898 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
2901 tree call_expr
= build_call_expr (fn
, 1, narg0
);
2902 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
2905 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, 0);
2906 op2
= force_reg (mode
, op2
);
2907 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
2908 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2909 0, OPTAB_LIB_WIDEN
);
2910 /* If the original exponent was negative, reciprocate the
2913 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2914 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2920 /* Try if the exponent is a third of an integer. In this case
2921 we can expand to x**(n/3) * cbrt(x)**(n%3). As cbrt (x) is
2922 different from pow (x, 1./3.) due to rounding and behavior
2923 with negative x we need to constrain this transformation to
2924 unsafe math and positive x or finite math. */
2925 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
2927 && flag_unsafe_math_optimizations
2928 && (tree_expr_nonnegative_p (arg0
)
2929 || !HONOR_NANS (mode
)))
2931 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
2932 real_round (&c2
, mode
, &c2
);
2933 n
= real_to_integer (&c2
);
2934 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2935 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
2936 real_convert (&c2
, mode
, &c2
);
2937 if (real_identical (&c2
, &c
)
2939 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
2942 tree call_expr
= build_call_expr (fn
, 1,narg0
);
2943 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
2944 if (abs (n
) % 3 == 2)
2945 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
2946 0, OPTAB_LIB_WIDEN
);
2949 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, 0);
2950 op2
= force_reg (mode
, op2
);
2951 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
2952 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2953 0, OPTAB_LIB_WIDEN
);
2954 /* If the original exponent was negative, reciprocate the
2957 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2958 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2964 /* Fall back to optab expansion. */
2965 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
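/* Worked example (annotation, not original source): with
   -funsafe-math-optimizations, pow (x, 2.5) matches the "half of an integer"
   case above (2 * 2.5 == 5), so it is expanded as sqrt (x) * x**2; similarly
   pow (x, -0.5) becomes 1 / sqrt (x) after the reciprocal fix-up.  The cbrt
   branch handles exponents that are thirds of an integer the same way, under
   the stated constraints on unsafe math and nonnegative arguments.  */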
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX
   if a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target, rtx subtarget)
{
2978 enum machine_mode mode
;
2979 enum machine_mode mode2
;
2981 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2984 arg0
= CALL_EXPR_ARG (exp
, 0);
2985 arg1
= CALL_EXPR_ARG (exp
, 1);
2986 mode
= TYPE_MODE (TREE_TYPE (exp
));
2988 /* Handle constant power. */
2990 if (TREE_CODE (arg1
) == INTEGER_CST
2991 && !TREE_OVERFLOW (arg1
))
2993 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
2995 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2996 Otherwise, check the number of multiplications required. */
2997 if ((TREE_INT_CST_HIGH (arg1
) == 0
2998 || TREE_INT_CST_HIGH (arg1
) == -1)
2999 && ((n
>= -1 && n
<= 2)
3001 && powi_cost (n
) <= POWI_MAX_MULTS
)))
3003 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
3004 op0
= force_reg (mode
, op0
);
3005 return expand_powi (op0
, mode
, n
);
3009 /* Emit a libcall to libgcc. */
3011 /* Mode of the 2nd argument must match that of an int. */
3012 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
3014 if (target
== NULL_RTX
)
3015 target
= gen_reg_rtx (mode
);
3017 op0
= expand_expr (arg0
, subtarget
, mode
, 0);
3018 if (GET_MODE (op0
) != mode
)
3019 op0
= convert_to_mode (mode
, op0
, 0);
3020 op1
= expand_expr (arg1
, 0, mode2
, 0);
3021 if (GET_MODE (op1
) != mode2
)
3022 op1
= convert_to_mode (mode2
, op1
, 0);
3024 target
= emit_library_call_value (powi_optab
->handlers
[(int) mode
].libfunc
,
3025 target
, LCT_CONST_MAKE_BLOCK
, mode
, 2,
3026 op0
, mode
, op1
, mode2
);
/* Expand expression EXP, which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
                       enum machine_mode target_mode)
{
3039 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
3045 tree src
= CALL_EXPR_ARG (exp
, 0);
3046 rtx result
, src_reg
, char_rtx
, before_strlen
;
3047 enum machine_mode insn_mode
= target_mode
, char_mode
;
3048 enum insn_code icode
= CODE_FOR_nothing
;
3051 /* If the length can be computed at compile-time, return it. */
3052 len
= c_strlen (src
, 0);
3054 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3056 /* If the length can be computed at compile-time and is constant
3057 integer, but there are side-effects in src, evaluate
3058 src for side-effects, then return len.
3059 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3060 can be optimized into: i++; x = 3; */
3061 len
= c_strlen (src
, 1);
3062 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3064 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3065 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3068 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3070 /* If SRC is not a pointer type, don't do this operation inline. */
3074 /* Bail out if we can't compute strlen in the right mode. */
3075 while (insn_mode
!= VOIDmode
)
3077 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
3078 if (icode
!= CODE_FOR_nothing
)
3081 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3083 if (insn_mode
== VOIDmode
)
3086 /* Make a place to write the result of the instruction. */
3090 && GET_MODE (result
) == insn_mode
3091 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3092 result
= gen_reg_rtx (insn_mode
);
3094 /* Make a place to hold the source address. We will not expand
3095 the actual source until we are sure that the expansion will
3096 not fail -- there are trees that cannot be expanded twice. */
3097 src_reg
= gen_reg_rtx (Pmode
);
3099 /* Mark the beginning of the strlen sequence so we can emit the
3100 source operand later. */
3101 before_strlen
= get_last_insn ();
3103 char_rtx
= const0_rtx
;
3104 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3105 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3107 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3109 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3110 char_rtx
, GEN_INT (align
));
3115 /* Now that we are assured of success, expand the source. */
3117 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3119 emit_move_insn (src_reg
, pat
);
3124 emit_insn_after (pat
, before_strlen
);
3126 emit_insn_before (pat
, get_insns ());
3128 /* Return the value in the proper mode for this function. */
3129 if (GET_MODE (result
) == target_mode
)
3131 else if (target
!= 0)
3132 convert_move (target
, result
, 0);
3134 target
= convert_to_mode (target_mode
, result
, 0);
/* Expand a call to the strstr builtin.  Return NULL_RTX if we failed;
   the caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */

static rtx
expand_builtin_strstr (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (exp);
      tree result = fold_builtin_strstr (CALL_EXPR_ARG (exp, 0),
                                         CALL_EXPR_ARG (exp, 1), type);
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}
/* Expand a call to the strchr builtin.  Return NULL_RTX if we failed;
   the caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */

static rtx
expand_builtin_strchr (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (exp);
      tree result = fold_builtin_strchr (CALL_EXPR_ARG (exp, 0),
                                         CALL_EXPR_ARG (exp, 1), type);
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);

      /* FIXME: Should use strchrM optab so that ports can optimize this.  */
    }
  return NULL_RTX;
}
/* Expand a call to the strrchr builtin.  Return NULL_RTX if we failed;
   the caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */

static rtx
expand_builtin_strrchr (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (exp);
      tree result = fold_builtin_strrchr (CALL_EXPR_ARG (exp, 0),
                                          CALL_EXPR_ARG (exp, 1), type);
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}
/* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed;
   the caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */

static rtx
expand_builtin_strpbrk (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (exp);
      tree result = fold_builtin_strpbrk (CALL_EXPR_ARG (exp, 0),
                                          CALL_EXPR_ARG (exp, 1), type);
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
                         enum machine_mode mode)
{
  const char *str = (const char *) data;

  gcc_assert (offset >= 0
              && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
                  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
{
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
3246 tree dest
= CALL_EXPR_ARG (exp
, 0);
3247 tree src
= CALL_EXPR_ARG (exp
, 1);
3248 tree len
= CALL_EXPR_ARG (exp
, 2);
3249 const char *src_str
;
3250 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3251 unsigned int dest_align
3252 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3253 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3254 tree result
= fold_builtin_memory_op (dest
, src
, len
,
3255 TREE_TYPE (TREE_TYPE (fndecl
)),
3257 HOST_WIDE_INT expected_size
= -1;
3258 unsigned int expected_align
= 0;
3262 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3264 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3266 result
= TREE_OPERAND (result
, 1);
3268 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3271 /* If DEST is not a pointer type, call the normal function. */
3272 if (dest_align
== 0)
3275 /* If either SRC is not a pointer type, don't do this
3276 operation in-line. */
3280 stringop_block_profile (exp
, &expected_align
, &expected_size
);
3281 if (expected_align
< dest_align
)
3282 expected_align
= dest_align
;
3283 dest_mem
= get_memory_rtx (dest
, len
);
3284 set_mem_align (dest_mem
, dest_align
);
3285 len_rtx
= expand_normal (len
);
3286 src_str
= c_getstr (src
);
      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
3292 && GET_CODE (len_rtx
) == CONST_INT
3293 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3294 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3295 (void *) src_str
, dest_align
))
3297 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3298 builtin_memcpy_read_str
,
3299 (void *) src_str
, dest_align
, 0);
3300 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3301 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3305 src_mem
= get_memory_rtx (src
, len
);
3306 set_mem_align (src_mem
, src_align
);
3308 /* Copy word part most expediently. */
3309 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3310 CALL_EXPR_TAILCALL (exp
)
3311 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3312 expected_align
, expected_size
);
3316 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3317 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
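/* Illustrative sketch (annotation, not original source): for a small constant
   source such as memcpy (buf, "abcdefgh", 8), the store_by_pieces path above
   emits word-sized immediate stores built by builtin_memcpy_read_str instead
   of loading from the string literal.  The hand-written equivalent below
   assumes a 4-byte word, a little-endian target and a suitably aligned BUF;
   all of that is target-dependent and only meant to show the idea.  */

static void
sketch_memcpy_by_pieces (char *buf)
{
  /* Two 4-byte immediate stores standing in for the block move.  */
  *(unsigned int *) (buf + 0) = 0x64636261u;    /* "abcd" little-endian */
  *(unsigned int *) (buf + 4) = 0x68676665u;    /* "efgh" */
}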
/* Expand a call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
{
3334 if (!validate_arglist (exp
,
3335 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3339 tree dest
= CALL_EXPR_ARG (exp
, 0);
3340 tree src
= CALL_EXPR_ARG (exp
, 1);
3341 tree len
= CALL_EXPR_ARG (exp
, 2);
3342 return expand_builtin_mempcpy_args (dest
, src
, len
,
3344 target
, mode
, /*endp=*/ 1);
3348 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3349 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3350 so that this can also be called without constructing an actual CALL_EXPR.
3351 TYPE is the return type of the call. The other arguments and return value
3352 are the same as for expand_builtin_mempcpy. */
3355 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
, tree type
,
3356 rtx target
, enum machine_mode mode
, int endp
)
3358 /* If return value is ignored, transform mempcpy into memcpy. */
3359 if (target
== const0_rtx
)
3361 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3366 return expand_expr (build_call_expr (fn
, 3, dest
, src
, len
),
3367 target
, mode
, EXPAND_NORMAL
);
3371 const char *src_str
;
3372 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3373 unsigned int dest_align
3374 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3375 rtx dest_mem
, src_mem
, len_rtx
;
3376 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, false, endp
);
3380 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3382 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3384 result
= TREE_OPERAND (result
, 1);
3386 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3389 /* If either SRC or DEST is not a pointer type, don't do this
3390 operation in-line. */
3391 if (dest_align
== 0 || src_align
== 0)
3394 /* If LEN is not constant, call the normal function. */
3395 if (! host_integerp (len
, 1))
3398 len_rtx
= expand_normal (len
);
3399 src_str
= c_getstr (src
);
      /* If SRC is a string constant and block move would be done
         by pieces, we can avoid loading the string from memory
         and only store the computed constants.  */
3405 && GET_CODE (len_rtx
) == CONST_INT
3406 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3407 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3408 (void *) src_str
, dest_align
))
3410 dest_mem
= get_memory_rtx (dest
, len
);
3411 set_mem_align (dest_mem
, dest_align
);
3412 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3413 builtin_memcpy_read_str
,
3414 (void *) src_str
, dest_align
, endp
);
3415 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3416 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3420 if (GET_CODE (len_rtx
) == CONST_INT
3421 && can_move_by_pieces (INTVAL (len_rtx
),
3422 MIN (dest_align
, src_align
)))
3424 dest_mem
= get_memory_rtx (dest
, len
);
3425 set_mem_align (dest_mem
, dest_align
);
3426 src_mem
= get_memory_rtx (src
, len
);
3427 set_mem_align (src_mem
, src_align
);
3428 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3429 MIN (dest_align
, src_align
), endp
);
3430 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3431 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Expand expression EXP, which is a call to the memmove builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_memmove (tree exp, rtx target, enum machine_mode mode, int ignore)
{
3445 if (!validate_arglist (exp
,
3446 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3450 tree dest
= CALL_EXPR_ARG (exp
, 0);
3451 tree src
= CALL_EXPR_ARG (exp
, 1);
3452 tree len
= CALL_EXPR_ARG (exp
, 2);
3453 return expand_builtin_memmove_args (dest
, src
, len
, TREE_TYPE (exp
),
3454 target
, mode
, ignore
);
3458 /* Helper function to do the actual work for expand_builtin_memmove. The
3459 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3460 so that this can also be called without constructing an actual CALL_EXPR.
3461 TYPE is the return type of the call. The other arguments and return value
3462 are the same as for expand_builtin_memmove. */
3465 expand_builtin_memmove_args (tree dest
, tree src
, tree len
,
3466 tree type
, rtx target
, enum machine_mode mode
,
3469 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, ignore
, /*endp=*/3);
3473 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3475 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3477 result
= TREE_OPERAND (result
, 1);
3479 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3482 /* Otherwise, call the normal function. */
/* Expand expression EXP, which is a call to the bcopy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bcopy (tree exp, int ignore)
{
  tree type = TREE_TYPE (exp);
  tree src, dest, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  src = CALL_EXPR_ARG (exp, 0);
  dest = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);

  /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
     This is done this way so that if it isn't expanded inline, we fall
     back to calling bcopy instead of memmove.  */
  return expand_builtin_memmove_args (dest, src,
                                      fold_convert (sizetype, size),
                                      type, const0_rtx, VOIDmode,
                                      ignore);
}
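/* Illustrative sketch (annotation, not original source): the transformation
   above only swaps the argument order and converts the size, so a call like
   bcopy (src, dst, n) is expanded as the memmove below with the result
   ignored, which is why const0_rtx is passed as the target.  */

static void
sketch_bcopy (const void *src, void *dst, unsigned long n)
{
  /* Same operation, memmove argument order.  */
  __builtin_memmove (dst, src, n);
}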
3513 # define HAVE_movstr 0
3514 # define CODE_FOR_movstr CODE_FOR_nothing
3517 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3518 we failed, the caller should emit a normal call, otherwise try to
3519 get the result in TARGET, if convenient. If ENDP is 0 return the
3520 destination pointer, if ENDP is 1 return the end pointer ala
3521 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3525 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3531 const struct insn_data
* data
;
3536 dest_mem
= get_memory_rtx (dest
, NULL
);
3537 src_mem
= get_memory_rtx (src
, NULL
);
3540 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3541 dest_mem
= replace_equiv_address (dest_mem
, target
);
3542 end
= gen_reg_rtx (Pmode
);
3546 if (target
== 0 || target
== const0_rtx
)
3548 end
= gen_reg_rtx (Pmode
);
3556 data
= insn_data
+ CODE_FOR_movstr
;
3558 if (data
->operand
[0].mode
!= VOIDmode
)
3559 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3561 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3567 /* movstr is supposed to set end to the address of the NUL
3568 terminator. If the caller requested a mempcpy-like return value,
3570 if (endp
== 1 && target
!= const0_rtx
)
3572 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3573 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_strcpy (tree fndecl, tree exp, rtx target, enum machine_mode mode)
{
3587 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3589 tree dest
= CALL_EXPR_ARG (exp
, 0);
3590 tree src
= CALL_EXPR_ARG (exp
, 1);
3591 return expand_builtin_strcpy_args (fndecl
, dest
, src
, target
, mode
);
3596 /* Helper function to do the actual work for expand_builtin_strcpy. The
3597 arguments to the builtin_strcpy call DEST and SRC are broken out
3598 so that this can also be called without constructing an actual CALL_EXPR.
3599 The other arguments and return value are the same as for
3600 expand_builtin_strcpy. */
3603 expand_builtin_strcpy_args (tree fndecl
, tree dest
, tree src
,
3604 rtx target
, enum machine_mode mode
)
3606 tree result
= fold_builtin_strcpy (fndecl
, dest
, src
, 0);
3608 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3609 return expand_movstr (dest
, src
, target
, /*endp=*/0);
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
{
3623 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3626 dst
= CALL_EXPR_ARG (exp
, 0);
3627 src
= CALL_EXPR_ARG (exp
, 1);
3629 /* If return value is ignored, transform stpcpy into strcpy. */
3630 if (target
== const0_rtx
)
3632 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3636 return expand_expr (build_call_expr (fn
, 2, dst
, src
),
3637 target
, mode
, EXPAND_NORMAL
);
3644 /* Ensure we get an actual string whose length can be evaluated at
3645 compile-time, not an expression containing a string. This is
3646 because the latter will potentially produce pessimized code
3647 when used to produce the return value. */
3648 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3649 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3651 lenp1
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
3652 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
, TREE_TYPE (exp
),
3653 target
, mode
, /*endp=*/2);
3658 if (TREE_CODE (len
) == INTEGER_CST
)
3660 rtx len_rtx
= expand_normal (len
);
3662 if (GET_CODE (len_rtx
) == CONST_INT
)
3664 ret
= expand_builtin_strcpy_args (get_callee_fndecl (exp
),
3665 dst
, src
, target
, mode
);
3671 if (mode
!= VOIDmode
)
3672 target
= gen_reg_rtx (mode
);
3674 target
= gen_reg_rtx (GET_MODE (ret
));
3676 if (GET_MODE (target
) != GET_MODE (ret
))
3677 ret
= gen_lowpart (GET_MODE (target
), ret
);
3679 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3680 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3688 return expand_movstr (dst
, src
, target
, /*endp=*/2);
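/* Illustrative sketch (annotation, not original source): for a source whose
   length is known at compile time, the code above rewrites

     p = stpcpy (dst, "abc");

   as a mempcpy of strlen ("abc") + 1 == 4 bytes with the returned end pointer
   backed up by one, so P ends up pointing at the copied NUL terminator -- the
   documented stpcpy result.  A hand-written equivalent:  */

static char *
sketch_stpcpy_constant (char *dst)
{
  /* Copy the four bytes "abc\0", then return the address of the NUL.  */
  __builtin_memcpy (dst, "abc", 4);
  return dst + 3;
}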
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
                          enum machine_mode mode)
{
  const char *str = (const char *) data;

  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  return c_readstr (str + offset, mode);
}
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
{
3714 tree fndecl
= get_callee_fndecl (exp
);
3716 if (validate_arglist (exp
,
3717 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3719 tree dest
= CALL_EXPR_ARG (exp
, 0);
3720 tree src
= CALL_EXPR_ARG (exp
, 1);
3721 tree len
= CALL_EXPR_ARG (exp
, 2);
3722 tree slen
= c_strlen (src
, 1);
3723 tree result
= fold_builtin_strncpy (fndecl
, dest
, src
, len
, slen
);
3727 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3729 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3731 result
= TREE_OPERAND (result
, 1);
3733 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
      /* We must be passed a constant len and src parameter.  */
      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
        return NULL_RTX;

      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
         len is greater than strlen(s2)+1.  In that case try to
         use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
        {
          unsigned int dest_align
            = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
          const char *p = c_getstr (src);
          rtx dest_mem;

          if (!p || dest_align == 0 || !host_integerp (len, 1)
              || !can_store_by_pieces (tree_low_cst (len, 1),
                                       builtin_strncpy_read_str,
                                       (void *) p, dest_align))
            return NULL_RTX;

          dest_mem = get_memory_rtx (dest, len);
          store_by_pieces (dest_mem, tree_low_cst (len, 1),
                           builtin_strncpy_read_str,
                           (void *) p, dest_align, 0);
          dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
          dest_mem = convert_memory_address (ptr_mode, dest_mem);
          return dest_mem;
        }
    }
  return NULL_RTX;
}
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

static rtx
builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                         enum machine_mode mode)
{
  const char *c = (const char *) data;
  char *p = alloca (GET_MODE_SIZE (mode));

  memset (p, *c, GET_MODE_SIZE (mode));

  return c_readstr (p, mode);
}
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */
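/* Illustrative sketch only (not part of the original sources): for a
   4-byte mode the replicated value is produced with a single multiply,

       coeff  = 0x01010101;               // c_readstr on "\1\1\1\1"
       target = (data & 0xff) * coeff;    // e.g. data == 0x2a -> 0x2a2a2a2a

   which is what the convert_to_mode/expand_mult pair below computes.  */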
static rtx
builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        enum machine_mode mode)
{
  rtx target, coeff;
  size_t size;
  char *p;

  size = GET_MODE_SIZE (mode);
  p = alloca (size);
  memset (p, 1, size);
  coeff = c_readstr (p, mode);

  target = convert_to_mode (mode, (rtx) data, 1);
  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
  return force_reg (mode, target);
}
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */

static rtx
expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree val = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      return expand_builtin_memset_args (dest, val, len, target, mode, exp);
    }
}
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
                            rtx target, enum machine_mode mode, tree orig_exp)
{
  enum built_in_function fcode;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3852 /* If DEST is not a pointer type, don't do this operation in-line. */
3853 if (dest_align
== 0)
3856 stringop_block_profile (orig_exp
, &expected_align
, &expected_size
);
3857 if (expected_align
< dest_align
)
3858 expected_align
= dest_align
;
3860 /* If the LEN parameter is zero, return DEST. */
3861 if (integer_zerop (len
))
3863 /* Evaluate and ignore VAL in case it has side-effects. */
3864 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3865 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3868 /* Stabilize the arguments in case we fail. */
3869 dest
= builtin_save_expr (dest
);
3870 val
= builtin_save_expr (val
);
3871 len
= builtin_save_expr (len
);
3873 len_rtx
= expand_normal (len
);
3874 dest_mem
= get_memory_rtx (dest
, len
);
3876 if (TREE_CODE (val
) != INTEGER_CST
)
3880 val_rtx
= expand_normal (val
);
3881 val_rtx
= convert_to_mode (TYPE_MODE (unsigned_char_type_node
),
3884 /* Assume that we can memset by pieces if we can store
3885 * the coefficients by pieces (in the required modes).
3886 * We can't pass builtin_memset_gen_str as that emits RTL. */
3888 if (host_integerp (len
, 1)
3889 && !(optimize_size
&& tree_low_cst (len
, 1) > 1)
3890 && can_store_by_pieces (tree_low_cst (len
, 1),
3891 builtin_memset_read_str
, &c
, dest_align
))
3893 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3895 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3896 builtin_memset_gen_str
, val_rtx
, dest_align
, 0);
3898 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, val_rtx
,
3899 dest_align
, expected_align
,
3903 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3904 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3908 if (target_char_cast (val
, &c
))
3913 if (host_integerp (len
, 1)
3914 && !(optimize_size
&& tree_low_cst (len
, 1) > 1)
3915 && can_store_by_pieces (tree_low_cst (len
, 1),
3916 builtin_memset_read_str
, &c
, dest_align
))
3917 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3918 builtin_memset_read_str
, &c
, dest_align
, 0);
3919 else if (!set_storage_via_setmem (dest_mem
, len_rtx
, GEN_INT (c
),
3920 dest_align
, expected_align
,
3924 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3925 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3929 set_mem_align (dest_mem
, dest_align
);
3930 dest_addr
= clear_storage_hints (dest_mem
, len_rtx
,
3931 CALL_EXPR_TAILCALL (orig_exp
)
3932 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3933 expected_align
, expected_size
);
3937 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3938 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3944 fndecl
= get_callee_fndecl (orig_exp
);
3945 fcode
= DECL_FUNCTION_CODE (fndecl
);
3946 if (fcode
== BUILT_IN_MEMSET
)
3947 fn
= build_call_expr (fndecl
, 3, dest
, val
, len
);
3948 else if (fcode
== BUILT_IN_BZERO
)
3949 fn
= build_call_expr (fndecl
, 2, dest
, len
);
3952 if (TREE_CODE (fn
) == CALL_EXPR
)
3953 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (orig_exp
);
3954 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  tree dest, size;

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert (sizetype, size),
                                     const0_rtx, VOIDmode, exp);
}
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
3989 if (!validate_arglist (exp
,
3990 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3994 tree result
= fold_builtin_memcmp (CALL_EXPR_ARG (exp
, 0),
3995 CALL_EXPR_ARG (exp
, 1),
3996 CALL_EXPR_ARG (exp
, 2));
3998 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4001 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
4003 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4006 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4007 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4008 tree len
= CALL_EXPR_ARG (exp
, 2);
4011 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4013 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4014 enum machine_mode insn_mode
;
4016 #ifdef HAVE_cmpmemsi
4018 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
4021 #ifdef HAVE_cmpstrnsi
4023 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4028 /* If we don't have POINTER_TYPE, call the function. */
4029 if (arg1_align
== 0 || arg2_align
== 0)
4032 /* Make a place to write the result of the instruction. */
4035 && REG_P (result
) && GET_MODE (result
) == insn_mode
4036 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4037 result
= gen_reg_rtx (insn_mode
);
4039 arg1_rtx
= get_memory_rtx (arg1
, len
);
4040 arg2_rtx
= get_memory_rtx (arg2
, len
);
4041 arg3_rtx
= expand_normal (len
);
4043 /* Set MEM_SIZE as appropriate. */
4044 if (GET_CODE (arg3_rtx
) == CONST_INT
)
4046 set_mem_size (arg1_rtx
, arg3_rtx
);
4047 set_mem_size (arg2_rtx
, arg3_rtx
);
4050 #ifdef HAVE_cmpmemsi
4052 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4053 GEN_INT (MIN (arg1_align
, arg2_align
)));
4056 #ifdef HAVE_cmpstrnsi
4058 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4059 GEN_INT (MIN (arg1_align
, arg2_align
)));
4067 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE_MAKE_BLOCK
,
4068 TYPE_MODE (integer_type_node
), 3,
4069 XEXP (arg1_rtx
, 0), Pmode
,
4070 XEXP (arg2_rtx
, 0), Pmode
,
4071 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4072 TYPE_UNSIGNED (sizetype
)),
4073 TYPE_MODE (sizetype
));
4075 /* Return the value in the proper mode for this function. */
4076 mode
= TYPE_MODE (TREE_TYPE (exp
));
4077 if (GET_MODE (result
) == mode
)
4079 else if (target
!= 0)
4081 convert_move (target
, result
, 0);
4085 return convert_to_mode (mode
, result
, 0);
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4099 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4103 tree result
= fold_builtin_strcmp (CALL_EXPR_ARG (exp
, 0),
4104 CALL_EXPR_ARG (exp
, 1));
4106 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4109 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4110 if (cmpstr_optab
[SImode
] != CODE_FOR_nothing
4111 || cmpstrn_optab
[SImode
] != CODE_FOR_nothing
)
4113 rtx arg1_rtx
, arg2_rtx
;
4114 rtx result
, insn
= NULL_RTX
;
4116 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4117 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4120 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4122 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4124 /* If we don't have POINTER_TYPE, call the function. */
4125 if (arg1_align
== 0 || arg2_align
== 0)
4128 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4129 arg1
= builtin_save_expr (arg1
);
4130 arg2
= builtin_save_expr (arg2
);
4132 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4133 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4135 #ifdef HAVE_cmpstrsi
4136 /* Try to call cmpstrsi. */
4139 enum machine_mode insn_mode
4140 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4142 /* Make a place to write the result of the instruction. */
4145 && REG_P (result
) && GET_MODE (result
) == insn_mode
4146 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4147 result
= gen_reg_rtx (insn_mode
);
4149 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4150 GEN_INT (MIN (arg1_align
, arg2_align
)));
4153 #ifdef HAVE_cmpstrnsi
4154 /* Try to determine at least one length and call cmpstrnsi. */
4155 if (!insn
&& HAVE_cmpstrnsi
)
4160 enum machine_mode insn_mode
4161 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4162 tree len1
= c_strlen (arg1
, 1);
4163 tree len2
= c_strlen (arg2
, 1);
4166 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4168 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4170 /* If we don't have a constant length for the first, use the length
4171 of the second, if we know it. We don't require a constant for
4172 this case; some cost analysis could be done if both are available
4173 but neither is constant. For now, assume they're equally cheap,
4174 unless one has side effects. If both strings have constant lengths,
4181 else if (TREE_SIDE_EFFECTS (len1
))
4183 else if (TREE_SIDE_EFFECTS (len2
))
4185 else if (TREE_CODE (len1
) != INTEGER_CST
)
4187 else if (TREE_CODE (len2
) != INTEGER_CST
)
4189 else if (tree_int_cst_lt (len1
, len2
))
4194 /* If both arguments have side effects, we cannot optimize. */
4195 if (!len
|| TREE_SIDE_EFFECTS (len
))
4198 arg3_rtx
= expand_normal (len
);
4200 /* Make a place to write the result of the instruction. */
4203 && REG_P (result
) && GET_MODE (result
) == insn_mode
4204 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4205 result
= gen_reg_rtx (insn_mode
);
4207 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4208 GEN_INT (MIN (arg1_align
, arg2_align
)));
4216 /* Return the value in the proper mode for this function. */
4217 mode
= TYPE_MODE (TREE_TYPE (exp
));
4218 if (GET_MODE (result
) == mode
)
4221 return convert_to_mode (mode
, result
, 0);
4222 convert_move (target
, result
, 0);
4226 /* Expand the library call ourselves using a stabilized argument
4227 list to avoid re-evaluating the function's arguments twice. */
4228 #ifdef HAVE_cmpstrnsi
4231 fndecl
= get_callee_fndecl (exp
);
4232 fn
= build_call_expr (fndecl
, 2, arg1
, arg2
);
4233 if (TREE_CODE (fn
) == CALL_EXPR
)
4234 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4235 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4248 if (!validate_arglist (exp
,
4249 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4253 tree result
= fold_builtin_strncmp (CALL_EXPR_ARG (exp
, 0),
4254 CALL_EXPR_ARG (exp
, 1),
4255 CALL_EXPR_ARG (exp
, 2));
4257 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4260 /* If c_strlen can determine an expression for one of the string
4261 lengths, and it doesn't have side effects, then emit cmpstrnsi
4262 using length MIN(strlen(string)+1, arg3). */
4263 #ifdef HAVE_cmpstrnsi
4266 tree len
, len1
, len2
;
4267 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4270 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4271 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4272 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4275 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4277 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4278 enum machine_mode insn_mode
4279 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4281 len1
= c_strlen (arg1
, 1);
4282 len2
= c_strlen (arg2
, 1);
4285 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4287 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
4289 /* If we don't have a constant length for the first, use the length
4290 of the second, if we know it. We don't require a constant for
4291 this case; some cost analysis could be done if both are available
4292 but neither is constant. For now, assume they're equally cheap,
4293 unless one has side effects. If both strings have constant lengths,
4300 else if (TREE_SIDE_EFFECTS (len1
))
4302 else if (TREE_SIDE_EFFECTS (len2
))
4304 else if (TREE_CODE (len1
) != INTEGER_CST
)
4306 else if (TREE_CODE (len2
) != INTEGER_CST
)
4308 else if (tree_int_cst_lt (len1
, len2
))
4313 /* If both arguments have side effects, we cannot optimize. */
4314 if (!len
|| TREE_SIDE_EFFECTS (len
))
4317 /* The actual new length parameter is MIN(len,arg3). */
4318 len
= fold_build2 (MIN_EXPR
, TREE_TYPE (len
), len
,
4319 fold_convert (TREE_TYPE (len
), arg3
));
4321 /* If we don't have POINTER_TYPE, call the function. */
4322 if (arg1_align
== 0 || arg2_align
== 0)
4325 /* Make a place to write the result of the instruction. */
4328 && REG_P (result
) && GET_MODE (result
) == insn_mode
4329 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4330 result
= gen_reg_rtx (insn_mode
);
4332 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4333 arg1
= builtin_save_expr (arg1
);
4334 arg2
= builtin_save_expr (arg2
);
4335 len
= builtin_save_expr (len
);
4337 arg1_rtx
= get_memory_rtx (arg1
, len
);
4338 arg2_rtx
= get_memory_rtx (arg2
, len
);
4339 arg3_rtx
= expand_normal (len
);
4340 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4341 GEN_INT (MIN (arg1_align
, arg2_align
)));
4346 /* Return the value in the proper mode for this function. */
4347 mode
= TYPE_MODE (TREE_TYPE (exp
));
4348 if (GET_MODE (result
) == mode
)
4351 return convert_to_mode (mode
, result
, 0);
4352 convert_move (target
, result
, 0);
4356 /* Expand the library call ourselves using a stabilized argument
4357 list to avoid re-evaluating the function's arguments twice. */
4358 fndecl
= get_callee_fndecl (exp
);
4359 fn
= build_call_expr (fndecl
, 3, arg1
, arg2
, len
);
4360 if (TREE_CODE (fn
) == CALL_EXPR
)
4361 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4362 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand expression EXP, which is a call to the strcat builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */
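/* For illustration only (a sketch of the in-line strategy used below,
   not part of the original sources): when the source is a known string,

       strcat (d, "abc");

   is effectively rewritten as

       char *q = d + strlen (d);     // strlen call built below
       strcpy (q, "abc");            // expand_builtin_strcpy_args
       // the value of the whole call is d itself

   and anything more complicated falls back to a library call.  */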
static rtx
expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4375 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4379 tree dst
= CALL_EXPR_ARG (exp
, 0);
4380 tree src
= CALL_EXPR_ARG (exp
, 1);
4381 const char *p
= c_getstr (src
);
4383 /* If the string length is zero, return the dst parameter. */
4384 if (p
&& *p
== '\0')
4385 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4389 /* See if we can store by pieces into (dst + strlen(dst)). */
4390 tree newsrc
, newdst
,
4391 strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
4394 /* Stabilize the argument list. */
4395 newsrc
= builtin_save_expr (src
);
4396 dst
= builtin_save_expr (dst
);
4400 /* Create strlen (dst). */
4401 newdst
= build_call_expr (strlen_fn
, 1, dst
);
4402 /* Create (dst + (cast) strlen (dst)). */
4403 newdst
= fold_convert (TREE_TYPE (dst
), newdst
);
4404 newdst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dst
), dst
, newdst
);
4406 newdst
= builtin_save_expr (newdst
);
4408 if (!expand_builtin_strcpy_args (fndecl
, newdst
, newsrc
, target
, mode
))
4410 end_sequence (); /* Stop sequence. */
4414 /* Output the entire sequence. */
4415 insns
= get_insns ();
4419 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
/* Expand expression EXP, which is a call to the strncat builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree result = fold_builtin_strncat (CALL_EXPR_ARG (exp, 0),
                                          CALL_EXPR_ARG (exp, 1),
                                          CALL_EXPR_ARG (exp, 2));
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strspn builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree result = fold_builtin_strspn (CALL_EXPR_ARG (exp, 0),
                                         CALL_EXPR_ARG (exp, 1));
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}
/* Expand expression EXP, which is a call to the strcspn builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
{
  if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      tree result = fold_builtin_strcspn (CALL_EXPR_ARG (exp, 0),
                                          CALL_EXPR_ARG (exp, 1));
      if (result)
        return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  return NULL_RTX;
}
4479 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4480 if that's convenient. */
4483 expand_builtin_saveregs (void)
4487 /* Don't do __builtin_saveregs more than once in a function.
4488 Save the result of the first call and reuse it. */
4489 if (saveregs_value
!= 0)
4490 return saveregs_value
;
4492 /* When this function is called, it means that registers must be
4493 saved on entry to this function. So we migrate the call to the
4494 first insn of this function. */
4498 /* Do whatever the machine needs done in this case. */
4499 val
= targetm
.calls
.expand_builtin_saveregs ();
4504 saveregs_value
= val
;
4506 /* Put the insns after the NOTE that starts the function. If this
4507 is inside a start_sequence, make the outer-level insn chain current, so
4508 the code is placed at the start of the function. */
4509 push_topmost_sequence ();
4510 emit_insn_after (seq
, entry_of_function ());
4511 pop_topmost_sequence ();
4516 /* __builtin_args_info (N) returns word N of the arg space info
4517 for the current function. The number and meanings of words
4518 is controlled by the definition of CUMULATIVE_ARGS. */
4521 expand_builtin_args_info (tree exp
)
4523 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
4524 int *word_ptr
= (int *) ¤t_function_args_info
;
4526 gcc_assert (sizeof (CUMULATIVE_ARGS
) % sizeof (int) == 0);
4528 if (call_expr_nargs (exp
) != 0)
4530 if (!host_integerp (CALL_EXPR_ARG (exp
, 0), 0))
4531 error ("argument of %<__builtin_args_info%> must be constant");
4534 HOST_WIDE_INT wordnum
= tree_low_cst (CALL_EXPR_ARG (exp
, 0), 0);
4536 if (wordnum
< 0 || wordnum
>= nwords
)
4537 error ("argument of %<__builtin_args_info%> out of range");
4539 return GEN_INT (word_ptr
[wordnum
]);
4543 error ("missing argument in %<__builtin_args_info%>");
/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (Pmode, add_optab,
                       current_function_internal_arg_pointer,
                       current_function_arg_offset_rtx,
                       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4561 /* Make it easier for the backends by protecting the valist argument
4562 from multiple evaluations. */
4565 stabilize_va_list (tree valist
, int needs_lvalue
)
4567 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4569 if (TREE_SIDE_EFFECTS (valist
))
4570 valist
= save_expr (valist
);
4572 /* For this case, the backends will be expecting a pointer to
4573 TREE_TYPE (va_list_type_node), but it's possible we've
4574 actually been given an array (an actual va_list_type_node).
4576 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4578 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4579 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4588 if (! TREE_SIDE_EFFECTS (valist
))
4591 pt
= build_pointer_type (va_list_type_node
);
4592 valist
= fold_build1 (ADDR_EXPR
, pt
, valist
);
4593 TREE_SIDE_EFFECTS (valist
) = 1;
4596 if (TREE_SIDE_EFFECTS (valist
))
4597 valist
= save_expr (valist
);
4598 valist
= build_fold_indirect_ref (valist
);
4604 /* The "standard" definition of va_list is void*. */
4607 std_build_builtin_va_list (void)
4609 return ptr_type_node
;
4612 /* The "standard" implementation of va_start: just assign `nextarg' to
4616 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4620 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
4621 make_tree (ptr_type_node
, nextarg
));
4622 TREE_SIDE_EFFECTS (t
) = 1;
4624 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;

  if (call_expr_nargs (exp) < 2)
    {
      error ("too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list (CALL_EXPR_ARG (exp, 0), 1);

#ifdef EXPAND_BUILTIN_VA_START
  EXPAND_BUILTIN_VA_START (valist, nextarg);
#else
  std_expand_builtin_va_start (valist, nextarg);
#endif

  return const0_rtx;
}
4656 /* The "standard" implementation of va_arg: read the value from the
4657 current (padded) address and increment by the (padded) size. */
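/* For illustration only (a sketch of what the gimplification below
   produces, not part of the original sources); ignoring the dynamic
   re-alignment and padding cases it is essentially

       T va_arg_T (void **ap)
       {
         void *addr = *ap;                       // current argument slot
         *ap = (char *) *ap
               + ROUND_UP (sizeof (T), PARM_BOUNDARY / BITS_PER_UNIT);
         return *(T *) addr;                     // read the (padded) value
       }

   where ROUND_UP stands for the usual round-to-multiple idiom; the extra
   steps below handle over-aligned types and PAD_VARARGS_DOWN targets.  */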
4660 std_gimplify_va_arg_expr (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
4662 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4663 unsigned HOST_WIDE_INT align
, boundary
;
4666 #ifdef ARGS_GROW_DOWNWARD
4667 /* All of the alignment and movement below is for args-grow-up machines.
4668 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4669 implement their own specialized gimplify_va_arg_expr routines. */
4673 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4675 type
= build_pointer_type (type
);
4677 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4678 boundary
= FUNCTION_ARG_BOUNDARY (TYPE_MODE (type
), type
) / BITS_PER_UNIT
;
4680 /* Hoist the valist value into a temporary for the moment. */
4681 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4683 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4684 requires greater alignment, we must perform dynamic alignment. */
4685 if (boundary
> align
4686 && !integer_zerop (TYPE_SIZE (type
)))
4688 t
= fold_convert (TREE_TYPE (valist
), size_int (boundary
- 1));
4689 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4690 build2 (PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
));
4691 gimplify_and_add (t
, pre_p
);
4693 t
= fold_convert (TREE_TYPE (valist
), size_int (-boundary
));
4694 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4695 build2 (BIT_AND_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
));
4696 gimplify_and_add (t
, pre_p
);
  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
4704 boundary
*= BITS_PER_UNIT
;
4705 if (boundary
< TYPE_ALIGN (type
))
4707 type
= build_variant_type_copy (type
);
4708 TYPE_ALIGN (type
) = boundary
;
4711 /* Compute the rounded size of the type. */
4712 type_size
= size_in_bytes (type
);
4713 rounded_size
= round_up (type_size
, align
);
4715 /* Reduce rounded_size so it's sharable with the postqueue. */
4716 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4720 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4722 /* Small args are padded downward. */
4723 t
= fold_build2 (GT_EXPR
, sizetype
, rounded_size
, size_int (align
));
4724 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4725 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4726 t
= fold_convert (TREE_TYPE (addr
), t
);
4727 addr
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr
), addr
, t
);
4730 /* Compute new value for AP. */
4731 t
= fold_convert (TREE_TYPE (valist
), rounded_size
);
4732 t
= build2 (PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
);
4733 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4734 gimplify_and_add (t
, pre_p
);
4736 addr
= fold_convert (build_pointer_type (type
), addr
);
4739 addr
= build_va_arg_indirect_ref (addr
);
4741 return build_va_arg_indirect_ref (addr
);
4744 /* Build an indirect-ref expression over the given TREE, which represents a
4745 piece of a va_arg() expansion. */
4747 build_va_arg_indirect_ref (tree addr
)
4749 addr
= build_fold_indirect_ref (addr
);
4751 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4757 /* Return a dummy expression of type TYPE in order to keep going after an
4761 dummy_object (tree type
)
4763 tree t
= build_int_cst (build_pointer_type (type
), 0);
4764 return build1 (INDIRECT_REF
, type
, t
);
4767 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4768 builtin function, but a very special sort of operator. */
4770 enum gimplify_status
4771 gimplify_va_arg_expr (tree
*expr_p
, tree
*pre_p
, tree
*post_p
)
4773 tree promoted_type
, want_va_type
, have_va_type
;
4774 tree valist
= TREE_OPERAND (*expr_p
, 0);
4775 tree type
= TREE_TYPE (*expr_p
);
4778 /* Verify that valist is of the proper type. */
4779 want_va_type
= va_list_type_node
;
4780 have_va_type
= TREE_TYPE (valist
);
4782 if (have_va_type
== error_mark_node
)
4785 if (TREE_CODE (want_va_type
) == ARRAY_TYPE
)
4787 /* If va_list is an array type, the argument may have decayed
4788 to a pointer type, e.g. by being passed to another function.
4789 In that case, unwrap both types so that we can compare the
4790 underlying records. */
4791 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
4792 || POINTER_TYPE_P (have_va_type
))
4794 want_va_type
= TREE_TYPE (want_va_type
);
4795 have_va_type
= TREE_TYPE (have_va_type
);
4799 if (TYPE_MAIN_VARIANT (want_va_type
) != TYPE_MAIN_VARIANT (have_va_type
))
4801 error ("first argument to %<va_arg%> not of type %<va_list%>");
4805 /* Generate a diagnostic for requesting data of a type that cannot
4806 be passed through `...' due to type promotion at the call site. */
4807 else if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4810 static bool gave_help
;
4812 /* Unfortunately, this is merely undefined, rather than a constraint
4813 violation, so we cannot make this an error. If this call is never
4814 executed, the program is still strictly conforming. */
4815 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4816 type
, promoted_type
);
4820 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4821 promoted_type
, type
);
4824 /* We can, however, treat "undefined" any way we please.
4825 Call abort to encourage the user to fix the program. */
4826 inform ("if this code is reached, the program will abort");
4827 t
= build_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4828 append_to_statement_list (t
, pre_p
);
4830 /* This is dead code, but go ahead and finish so that the
4831 mode of the result comes out right. */
4832 *expr_p
= dummy_object (type
);
4837 /* Make it easier for the backends by protecting the valist argument
4838 from multiple evaluations. */
4839 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4841 /* For this case, the backends will be expecting a pointer to
4842 TREE_TYPE (va_list_type_node), but it's possible we've
4843 actually been given an array (an actual va_list_type_node).
4845 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4847 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4848 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4850 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4853 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4855 if (!targetm
.gimplify_va_arg_expr
)
4856 /* FIXME:Once most targets are converted we should merely
4857 assert this is non-null. */
4860 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}
4880 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4881 builtin rather than just as an assignment in stdarg.h because of the
4882 nastiness of array-type va_list types. */
4885 expand_builtin_va_copy (tree exp
)
4889 dst
= CALL_EXPR_ARG (exp
, 0);
4890 src
= CALL_EXPR_ARG (exp
, 1);
4892 dst
= stabilize_va_list (dst
, 1);
4893 src
= stabilize_va_list (src
, 0);
4895 if (TREE_CODE (va_list_type_node
) != ARRAY_TYPE
)
4897 t
= build2 (MODIFY_EXPR
, va_list_type_node
, dst
, src
);
4898 TREE_SIDE_EFFECTS (t
) = 1;
4899 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4903 rtx dstb
, srcb
, size
;
4905 /* Evaluate to pointers. */
4906 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4907 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4908 size
= expand_expr (TYPE_SIZE_UNIT (va_list_type_node
), NULL_RTX
,
4909 VOIDmode
, EXPAND_NORMAL
);
4911 dstb
= convert_memory_address (Pmode
, dstb
);
4912 srcb
= convert_memory_address (Pmode
, srcb
);
4914 /* "Dereference" to BLKmode memories. */
4915 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4916 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4917 set_mem_align (dstb
, TYPE_ALIGN (va_list_type_node
));
4918 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4919 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4920 set_mem_align (srcb
, TYPE_ALIGN (va_list_type_node
));
4923 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4929 /* Expand a call to one of the builtin functions __builtin_frame_address or
4930 __builtin_return_address. */
4933 expand_builtin_frame_address (tree fndecl
, tree exp
)
4935 /* The argument must be a nonnegative integer constant.
4936 It counts the number of frames to scan up the stack.
4937 The value is the return address saved in that frame. */
4938 if (call_expr_nargs (exp
) == 0)
4939 /* Warning about missing arg was already issued. */
4941 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
4943 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4944 error ("invalid argument to %<__builtin_frame_address%>");
4946 error ("invalid argument to %<__builtin_return_address%>");
4952 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4953 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4955 /* Some ports cannot access arbitrary stack frames. */
4958 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4959 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4961 warning (0, "unsupported argument to %<__builtin_return_address%>");
4965 /* For __builtin_frame_address, return what we've got. */
4966 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4970 && ! CONSTANT_P (tem
))
4971 tem
= copy_to_mode_reg (Pmode
, tem
);
4976 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4977 we failed and the caller should emit a normal call, otherwise try to get
4978 the result in TARGET, if convenient. */
4981 expand_builtin_alloca (tree exp
, rtx target
)
4986 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
4987 should always expand to function calls. These can be intercepted
4992 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4995 /* Compute the argument. */
4996 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4998 /* Allocate the desired space. */
4999 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
5000 result
= convert_memory_address (ptr_mode
, result
);
5005 /* Expand a call to a bswap builtin with argument ARG0. MODE
5006 is the mode to expand with. */
5009 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
5011 enum machine_mode mode
;
5015 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5018 arg
= CALL_EXPR_ARG (exp
, 0);
5019 mode
= TYPE_MODE (TREE_TYPE (arg
));
5020 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
5022 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
5024 gcc_assert (target
);
5026 return convert_to_mode (mode
, target
, 0);
5029 /* Expand a call to a unary builtin in EXP.
5030 Return NULL_RTX if a normal call should be emitted rather than expanding the
5031 function in-line. If convenient, the result should be placed in TARGET.
5032 SUBTARGET may be used as the target for computing one of EXP's operands. */
5035 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
5036 rtx subtarget
, optab op_optab
)
5040 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
5043 /* Compute the argument. */
5044 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
, VOIDmode
, 0);
5045 /* Compute op, into TARGET if possible.
5046 Set TARGET to wherever the result comes back. */
5047 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
5048 op_optab
, op0
, target
, 1);
5049 gcc_assert (target
);
5051 return convert_to_mode (target_mode
, target
, 0);
5054 /* If the string passed to fputs is a constant and is one character
5055 long, we attempt to transform this call into __builtin_fputc(). */
5058 expand_builtin_fputs (tree exp
, rtx target
, bool unlocked
)
5060 /* Verify the arguments in the original call. */
5061 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5063 tree result
= fold_builtin_fputs (CALL_EXPR_ARG (exp
, 0),
5064 CALL_EXPR_ARG (exp
, 1),
5065 (target
== const0_rtx
),
5066 unlocked
, NULL_TREE
);
5068 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
5073 /* Expand a call to __builtin_expect. We just return our argument
5074 as the builtin_expect semantic should've been already executed by
5075 tree branch prediction pass. */
5078 expand_builtin_expect (tree exp
, rtx target
)
5082 if (call_expr_nargs (exp
) < 2)
5084 arg
= CALL_EXPR_ARG (exp
, 0);
5085 c
= CALL_EXPR_ARG (exp
, 1);
5087 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5088 /* When guessing was done, the hints should be already stripped away. */
5089 gcc_assert (!flag_guess_branch_prob
);
5094 expand_builtin_trap (void)
5098 emit_insn (gen_trap ());
5101 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
5105 /* Expand EXP, a call to fabs, fabsf or fabsl.
5106 Return NULL_RTX if a normal call should be emitted rather than expanding
5107 the function inline. If convenient, the result should be placed
5108 in TARGET. SUBTARGET may be used as the target for computing
5112 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5114 enum machine_mode mode
;
5118 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5121 arg
= CALL_EXPR_ARG (exp
, 0);
5122 mode
= TYPE_MODE (TREE_TYPE (arg
));
5123 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
5124 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */
5133 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5138 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5141 arg
= CALL_EXPR_ARG (exp
, 0);
5142 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5144 arg
= CALL_EXPR_ARG (exp
, 1);
5145 op1
= expand_normal (arg
);
5147 return expand_copysign (op0
, op1
, target
);
5150 /* Create a new constant string literal and return a char* pointer to it.
5151 The STRING_CST value is the LEN characters at STR. */
5153 build_string_literal (int len
, const char *str
)
5155 tree t
, elem
, index
, type
;
5157 t
= build_string (len
, str
);
5158 elem
= build_type_variant (char_type_node
, 1, 0);
5159 index
= build_index_type (build_int_cst (NULL_TREE
, len
- 1));
5160 type
= build_array_type (elem
, index
);
5161 TREE_TYPE (t
) = type
;
5162 TREE_CONSTANT (t
) = 1;
5163 TREE_INVARIANT (t
) = 1;
5164 TREE_READONLY (t
) = 1;
5165 TREE_STATIC (t
) = 1;
5167 type
= build_pointer_type (type
);
5168 t
= build1 (ADDR_EXPR
, type
, t
);
5170 type
= build_pointer_type (elem
);
5171 t
= build1 (NOP_EXPR
, type
, t
);
/* Expand EXP, a call to printf or printf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a printf_unlocked
   call.  */
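/* For illustration only (the transformations attempted below, not part
   of the original sources); all of them require the result of printf
   to be unused:

       printf ("%s\n", s);   =>  puts (s);
       printf ("%c", c);     =>  putchar (c);
       printf ("abc\n");     =>  puts ("abc");
       printf ("x");         =>  putchar ('x');
       printf ("");          =>  (nothing)

   Formats containing any other % directive are left alone.  */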
static rtx
expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
                       bool unlocked)
5184 /* If we're using an unlocked function, assume the other unlocked
5185 functions exist explicitly. */
5186 tree
const fn_putchar
= unlocked
? built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
]
5187 : implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
5188 tree
const fn_puts
= unlocked
? built_in_decls
[BUILT_IN_PUTS_UNLOCKED
]
5189 : implicit_built_in_decls
[BUILT_IN_PUTS
];
5190 const char *fmt_str
;
5193 int nargs
= call_expr_nargs (exp
);
5195 /* If the return value is used, don't do the transformation. */
5196 if (target
!= const0_rtx
)
5199 /* Verify the required arguments in the original call. */
5202 fmt
= CALL_EXPR_ARG (exp
, 0);
5203 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5206 /* Check whether the format is a literal string constant. */
5207 fmt_str
= c_getstr (fmt
);
5208 if (fmt_str
== NULL
)
5211 if (!init_target_chars ())
5214 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5215 if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
5218 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))))
5221 fn
= build_call_expr (fn_puts
, 1, CALL_EXPR_ARG (exp
, 1));
5223 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5224 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5227 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))) != INTEGER_TYPE
)
5230 fn
= build_call_expr (fn_putchar
, 1, CALL_EXPR_ARG (exp
, 1));
5234 /* We can't handle anything else with % args or %% ... yet. */
5235 if (strchr (fmt_str
, target_percent
))
5241 /* If the format specifier was "", printf does nothing. */
5242 if (fmt_str
[0] == '\0')
5244 /* If the format specifier has length of 1, call putchar. */
5245 if (fmt_str
[1] == '\0')
5247 /* Given printf("c"), (where c is any one character,)
5248 convert "c"[0] to an int and pass that to the replacement
5250 arg
= build_int_cst (NULL_TREE
, fmt_str
[0]);
5252 fn
= build_call_expr (fn_putchar
, 1, arg
);
5256 /* If the format specifier was "string\n", call puts("string"). */
5257 size_t len
= strlen (fmt_str
);
5258 if ((unsigned char)fmt_str
[len
- 1] == target_newline
)
5260 /* Create a NUL-terminated string that's one char shorter
5261 than the original, stripping off the trailing '\n'. */
5262 char *newstr
= alloca (len
);
5263 memcpy (newstr
, fmt_str
, len
- 1);
5264 newstr
[len
- 1] = 0;
5265 arg
= build_string_literal (len
, newstr
);
5267 fn
= build_call_expr (fn_puts
, 1, arg
);
5270 /* We'd like to arrange to call fputs(string,stdout) here,
5271 but we need stdout and don't have a way to get it yet. */
5278 if (TREE_CODE (fn
) == CALL_EXPR
)
5279 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5280 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
/* Expand EXP, a call to fprintf or fprintf_unlocked.
   Return NULL_RTX if a normal call should be emitted rather than transforming
   the function inline.  If convenient, the result should be placed in
   TARGET with mode MODE.  UNLOCKED indicates this is a fprintf_unlocked
   call.  */
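/* For illustration only (the transformations attempted below, not part
   of the original sources); all of them require the result of fprintf
   to be unused:

       fprintf (fp, "%s", s);   =>  fputs (s, fp);
       fprintf (fp, "%c", c);   =>  fputc (c, fp);
       fprintf (fp, "abc");     =>  fputs ("abc", fp);
       fprintf (fp, "");        =>  (evaluate fp for side effects only)

   Formats containing any other % directive are left alone.  */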
static rtx
expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
                        bool unlocked)
5292 /* If we're using an unlocked function, assume the other unlocked
5293 functions exist explicitly. */
5294 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
5295 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
5296 tree
const fn_fputs
= unlocked
? built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
]
5297 : implicit_built_in_decls
[BUILT_IN_FPUTS
];
5298 const char *fmt_str
;
5301 int nargs
= call_expr_nargs (exp
);
5303 /* If the return value is used, don't do the transformation. */
5304 if (target
!= const0_rtx
)
5307 /* Verify the required arguments in the original call. */
5310 fp
= CALL_EXPR_ARG (exp
, 0);
5311 if (! POINTER_TYPE_P (TREE_TYPE (fp
)))
5313 fmt
= CALL_EXPR_ARG (exp
, 1);
5314 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5317 /* Check whether the format is a literal string constant. */
5318 fmt_str
= c_getstr (fmt
);
5319 if (fmt_str
== NULL
)
5322 if (!init_target_chars ())
5325 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5326 if (strcmp (fmt_str
, target_percent_s
) == 0)
5329 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))))
5331 arg
= CALL_EXPR_ARG (exp
, 2);
5333 fn
= build_call_expr (fn_fputs
, 2, arg
, fp
);
5335 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5336 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5339 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))) != INTEGER_TYPE
)
5341 arg
= CALL_EXPR_ARG (exp
, 2);
5343 fn
= build_call_expr (fn_fputc
, 2, arg
, fp
);
5347 /* We can't handle anything else with % args or %% ... yet. */
5348 if (strchr (fmt_str
, target_percent
))
5354 /* If the format specifier was "", fprintf does nothing. */
5355 if (fmt_str
[0] == '\0')
5357 /* Evaluate and ignore FILE* argument for side-effects. */
5358 expand_expr (fp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5362 /* When "string" doesn't contain %, replace all cases of
5363 fprintf(stream,string) with fputs(string,stream). The fputs
5364 builtin will take care of special cases like length == 1. */
5366 fn
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
5371 if (TREE_CODE (fn
) == CALL_EXPR
)
5372 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5373 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
/* Expand a call EXP to sprintf.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   inline.  If convenient, the result should be placed in TARGET with
   mode MODE.  */
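/* For illustration only (the transformations attempted below, not part
   of the original sources):

       n = sprintf (d, "abc");     =>  strcpy (d, "abc");  n = 3;
       n = sprintf (d, "%s", s);   =>  strcpy (d, s);      n = strlen (s);

   the second form needing a constant-length S when the return value is
   actually used.  Formats containing any other % directive are left
   alone.  */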
static rtx
expand_builtin_sprintf (tree exp, rtx target, enum machine_mode mode)
5385 const char *fmt_str
;
5386 int nargs
= call_expr_nargs (exp
);
5388 /* Verify the required arguments in the original call. */
5391 dest
= CALL_EXPR_ARG (exp
, 0);
5392 if (! POINTER_TYPE_P (TREE_TYPE (dest
)))
  fmt = CALL_EXPR_ARG (exp, 1);
5395 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5398 /* Check whether the format is a literal string constant. */
5399 fmt_str
= c_getstr (fmt
);
5400 if (fmt_str
== NULL
)
5403 if (!init_target_chars ())
5406 /* If the format doesn't contain % args or %%, use strcpy. */
5407 if (strchr (fmt_str
, target_percent
) == 0)
5409 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5412 if ((nargs
> 2) || ! fn
)
5414 expand_expr (build_call_expr (fn
, 2, dest
, fmt
),
5415 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5416 if (target
== const0_rtx
)
5418 exp
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
5419 return expand_expr (exp
, target
, mode
, EXPAND_NORMAL
);
5421 /* If the format is "%s", use strcpy if the result isn't used. */
5422 else if (strcmp (fmt_str
, target_percent_s
) == 0)
5425 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5431 arg
= CALL_EXPR_ARG (exp
, 2);
5432 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
5435 if (target
!= const0_rtx
)
5437 len
= c_strlen (arg
, 1);
5438 if (! len
|| TREE_CODE (len
) != INTEGER_CST
)
5444 expand_expr (build_call_expr (fn
, 2, dest
, arg
),
5445 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5447 if (target
== const0_rtx
)
5449 return expand_expr (len
, target
, mode
, EXPAND_NORMAL
);
5455 /* Expand a call to either the entry or exit function profiler. */
5458 expand_builtin_profile_func (bool exitp
)
5462 this = DECL_RTL (current_function_decl
);
5463 gcc_assert (MEM_P (this));
5464 this = XEXP (this, 0);
5467 which
= profile_function_exit_libfunc
;
5469 which
= profile_function_entry_libfunc
;
5471 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this, Pmode
,
5472 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
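/* Illustrative sketch only (not part of the original sources): with a
   trampoline alignment of 8 bytes this emits the usual round-up idiom

       tramp = (tramp + 8 - 1) & -8;     // e.g. 0x1005 -> 0x1008

   using one PLUS and one AND in Pmode.  */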
static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
  temp = gen_reg_rtx (Pmode);
  addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
  mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
                              temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
                               temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
5505 expand_builtin_init_trampoline (tree exp
)
5507 tree t_tramp
, t_func
, t_chain
;
5508 rtx r_tramp
, r_func
, r_chain
;
5509 #ifdef TRAMPOLINE_TEMPLATE
5513 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5514 POINTER_TYPE
, VOID_TYPE
))
5517 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5518 t_func
= CALL_EXPR_ARG (exp
, 1);
5519 t_chain
= CALL_EXPR_ARG (exp
, 2);
5521 r_tramp
= expand_normal (t_tramp
);
5522 r_func
= expand_normal (t_func
);
5523 r_chain
= expand_normal (t_chain
);
5525 /* Generate insns to initialize the trampoline. */
5526 r_tramp
= round_trampoline_addr (r_tramp
);
5527 #ifdef TRAMPOLINE_TEMPLATE
5528 blktramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5529 set_mem_align (blktramp
, TRAMPOLINE_ALIGNMENT
);
5530 emit_block_move (blktramp
, assemble_trampoline_template (),
5531 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
5533 trampolines_created
= 1;
5534 INITIALIZE_TRAMPOLINE (r_tramp
, r_func
, r_chain
);
5540 expand_builtin_adjust_trampoline (tree exp
)
5544 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5547 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5548 tramp
= round_trampoline_addr (tramp
);
5549 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5550 TRAMPOLINE_ADJUST_ADDRESS (tramp
);
/* Expand a call to the built-in signbit, signbitf, signbitl, signbitd32,
   signbitd64, or signbitd128 function.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */
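/* For illustration only (a sketch of one expansion path below, not part
   of the original sources): for IEEE single precision on a 32-bit
   little-endian target, signbit (x) behaves like

       union { float f; unsigned int i; } u = { x };
       return (u.i >> 31) & 1;          // bitpos == 31 here

   i.e. view the value in an integer mode of the same size, then either
   mask the sign bit in place or shift it down to bit 0 and mask.  */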
5563 expand_builtin_signbit (tree exp
, rtx target
)
5565 const struct real_format
*fmt
;
5566 enum machine_mode fmode
, imode
, rmode
;
5567 HOST_WIDE_INT hi
, lo
;
5572 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5575 arg
= CALL_EXPR_ARG (exp
, 0);
5576 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5577 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5578 fmt
= REAL_MODE_FORMAT (fmode
);
5580 /* For floating point formats without a sign bit, implement signbit
5582 bitpos
= fmt
->signbit_ro
;
5585 /* But we can't do this if the format supports signed zero. */
5586 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5589 arg
= fold_build2 (LT_EXPR
, TREE_TYPE (exp
), arg
,
5590 build_real (TREE_TYPE (arg
), dconst0
));
5591 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5594 temp
= expand_normal (arg
);
5595 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5597 imode
= int_mode_for_mode (fmode
);
5598 if (imode
== BLKmode
)
5600 temp
= gen_lowpart (imode
, temp
);
5605 /* Handle targets with different FP word orders. */
5606 if (FLOAT_WORDS_BIG_ENDIAN
)
5607 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5609 word
= bitpos
/ BITS_PER_WORD
;
5610 temp
= operand_subword_force (temp
, word
, fmode
);
5611 bitpos
= bitpos
% BITS_PER_WORD
;
5614 /* Force the intermediate word_mode (or narrower) result into a
5615 register. This avoids attempting to create paradoxical SUBREGs
5616 of floating point modes below. */
5617 temp
= force_reg (imode
, temp
);
  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */
5623 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5625 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
5628 lo
= (HOST_WIDE_INT
) 1 << bitpos
;
5632 hi
= (HOST_WIDE_INT
) 1 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
5637 temp
= gen_lowpart (rmode
, temp
);
5638 temp
= expand_binop (rmode
, and_optab
, temp
,
5639 immed_double_const (lo
, hi
, rmode
),
5640 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5644 /* Perform a logical right shift to place the signbit in the least
5645 significant bit, then truncate the result to the desired mode
5646 and mask just this bit. */
5647 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5648 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5649 temp
= gen_lowpart (rmode
, temp
);
5650 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5651 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identifier of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  */
5663 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5668 /* If we are not profiling, just call the function. */
5669 if (!profile_arc_flag
)
5672 /* Otherwise call the wrapper. This should be equivalent for the rest of
5673 compiler, so the code does not diverge, and the wrapper may run the
5674 code necessary for keeping the profiling sane. */
5676 switch (DECL_FUNCTION_CODE (fn
))
5679 id
= get_identifier ("__gcov_fork");
5682 case BUILT_IN_EXECL
:
5683 id
= get_identifier ("__gcov_execl");
5686 case BUILT_IN_EXECV
:
5687 id
= get_identifier ("__gcov_execv");
5690 case BUILT_IN_EXECLP
:
5691 id
= get_identifier ("__gcov_execlp");
5694 case BUILT_IN_EXECLE
:
5695 id
= get_identifier ("__gcov_execle");
5698 case BUILT_IN_EXECVP
:
5699 id
= get_identifier ("__gcov_execvp");
5702 case BUILT_IN_EXECVE
:
5703 id
= get_identifier ("__gcov_execve");
5710 decl
= build_decl (FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5711 DECL_EXTERNAL (decl
) = 1;
5712 TREE_PUBLIC (decl
) = 1;
5713 DECL_ARTIFICIAL (decl
) = 1;
5714 TREE_NOTHROW (decl
) = 1;
5715 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5716 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5717 call
= rewrite_call_expr (exp
, 0, decl
, 0);
5718 return expand_call (call
, target
, ignore
);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */
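/* Illustrative example only (not part of the original sources): the
   __sync builtins come in _1, _2, _4, _8 and _16 variants enumerated
   consecutively, so for a "_4" variant FCODE_DIFF is 2 and the lookup
   below asks for an integer mode of BITS_PER_UNIT << 2 == 32 bits
   (SImode on most targets).  */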
static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, enum machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL, Pmode, EXPAND_SUM);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  IGNORE is true if we don't actually care about
   the result of the operation at all.  */
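/* For illustration only (not part of the original sources), the two
   families differ only in which value the caller sees:

       __sync_fetch_and_add (p, n)   // returns the OLD *p
       __sync_add_and_fetch (p, n)   // returns the NEW *p, i.e. old + n

   and __sync_fetch_and_nand is the "NOT means NAND" exception mentioned
   above.  */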
5770 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5771 enum rtx_code code
, bool after
,
5772 rtx target
, bool ignore
)
5775 enum machine_mode old_mode
;
5777 /* Expand the operands. */
5778 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5780 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL
, mode
, EXPAND_NORMAL
);
5781 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5782 of CONST_INTs, where we know the old_mode only from the call argument. */
5783 old_mode
= GET_MODE (val
);
5784 if (old_mode
== VOIDmode
)
5785 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5786 val
= convert_modes (mode
, old_mode
, val
, 1);
5789 return expand_sync_operation (mem
, val
, code
);
5791 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics.  EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */
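/* For illustration only (not part of the original sources), both forms
   perform the same atomic read-compare-store step and differ only in
   what they return:

       T old = *p;
       if (old == oldval)
         *p = newval;
       // __sync_val_compare_and_swap  returns old
       // __sync_bool_compare_and_swap returns old == oldval

   with the whole sequence executing atomically.  */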
5800 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5801 bool is_bool
, rtx target
)
5803 rtx old_val
, new_val
, mem
;
5804 enum machine_mode old_mode
;
5806 /* Expand the operands. */
5807 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5810 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL
, mode
, EXPAND_NORMAL
);
5811 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5812 of CONST_INTs, where we know the old_mode only from the call argument. */
5813 old_mode
= GET_MODE (old_val
);
5814 if (old_mode
== VOIDmode
)
5815 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1)));
5816 old_val
= convert_modes (mode
, old_mode
, old_val
, 1);
5818 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL
, mode
, EXPAND_NORMAL
);
5819 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5820 of CONST_INTs, where we know the old_mode only from the call argument. */
5821 old_mode
= GET_MODE (new_val
);
5822 if (old_mode
== VOIDmode
)
5823 old_mode
= TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2)));
5824 new_val
= convert_modes (mode
, old_mode
, new_val
, 1);
5827 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
5829 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
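/* Usage sketch (illustrative, not from the original source):

     long prev = __sync_val_compare_and_swap (&x, oldv, newv);   IS_BOOL false
     if (__sync_bool_compare_and_swap (&x, oldv, newv)) ...      IS_BOOL true

   Both atomically store NEWV into the location only if it still holds
   OLDV; the val form returns the previous contents, the bool form
   returns whether the store took place.  */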
/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
   general form is actually an atomic exchange, and some targets only
   support a reduced form with the second argument being a constant 1.
   EXP is the CALL_EXPR; TARGET is an optional place for us to store
   the results.  */

static rtx
expand_builtin_lock_test_and_set (enum machine_mode mode, tree exp,
				  rtx target)
{
  rtx val, mem;
  enum machine_mode old_mode;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  val = expand_expr (CALL_EXPR_ARG (exp, 1), NULL, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */
  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 1)));
  val = convert_modes (mode, old_mode, val, 1);

  return expand_sync_lock_test_and_set (mem, val, target);
}
/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_synchronize (void)
{
  tree x;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
	      tree_cons (NULL, build_string (6, "memory"), NULL));
  ASM_VOLATILE_P (x) = 1;

  expand_asm_expr (x);
}
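/* The fallback above amounts to the source-level compiler barrier
   (illustrative equivalence, not from the original source):

     __asm__ __volatile__ ("" : : : "memory");

   i.e. no instruction is emitted, but the memory clobber keeps memory
   accesses from being moved across the __sync_synchronize point.  */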
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_lock_release (enum machine_mode mode, tree exp)
{
  enum insn_code icode;
  rtx mem, insn;
  rtx val = const0_rtx;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* If there is an explicit operation in the md file, use it.  */
  icode = sync_lock_release[mode];
  if (icode != CODE_FOR_nothing)
    {
      if (!insn_data[icode].operand[1].predicate (val, mode))
	val = force_reg (mode, val);

      insn = GEN_FCN (icode) (mem, val);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise we can implement this operation by emitting a barrier
     followed by a store of zero.  */
  expand_builtin_synchronize ();
  emit_move_insn (mem, val);
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
		int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
      && fcode != BUILT_IN_ALLOCA)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	if (TREE_THIS_VOLATILE (arg))
	  {
	    volatilep = true;
	    break;
	  }

      if (!volatilep)
	{
	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }

  switch (fcode)
    {
5971 CASE_FLT_FN (BUILT_IN_FABS
):
5972 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5977 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5978 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5983 /* Just do a normal library call if we were unable to fold
5985 CASE_FLT_FN (BUILT_IN_CABS
):
5988 CASE_FLT_FN (BUILT_IN_EXP
):
5989 CASE_FLT_FN (BUILT_IN_EXP10
):
5990 CASE_FLT_FN (BUILT_IN_POW10
):
5991 CASE_FLT_FN (BUILT_IN_EXP2
):
5992 CASE_FLT_FN (BUILT_IN_EXPM1
):
5993 CASE_FLT_FN (BUILT_IN_LOGB
):
5994 CASE_FLT_FN (BUILT_IN_LOG
):
5995 CASE_FLT_FN (BUILT_IN_LOG10
):
5996 CASE_FLT_FN (BUILT_IN_LOG2
):
5997 CASE_FLT_FN (BUILT_IN_LOG1P
):
5998 CASE_FLT_FN (BUILT_IN_TAN
):
5999 CASE_FLT_FN (BUILT_IN_ASIN
):
6000 CASE_FLT_FN (BUILT_IN_ACOS
):
6001 CASE_FLT_FN (BUILT_IN_ATAN
):
6002 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6003 because of possible accuracy problems. */
6004 if (! flag_unsafe_math_optimizations
)
6006 CASE_FLT_FN (BUILT_IN_SQRT
):
6007 CASE_FLT_FN (BUILT_IN_FLOOR
):
6008 CASE_FLT_FN (BUILT_IN_CEIL
):
6009 CASE_FLT_FN (BUILT_IN_TRUNC
):
6010 CASE_FLT_FN (BUILT_IN_ROUND
):
6011 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6012 CASE_FLT_FN (BUILT_IN_RINT
):
6013 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
6018 CASE_FLT_FN (BUILT_IN_ILOGB
):
6019 if (! flag_unsafe_math_optimizations
)
6021 CASE_FLT_FN (BUILT_IN_ISINF
):
6022 target
= expand_builtin_interclass_mathfn (exp
, target
, subtarget
);
6027 CASE_FLT_FN (BUILT_IN_LCEIL
):
6028 CASE_FLT_FN (BUILT_IN_LLCEIL
):
6029 CASE_FLT_FN (BUILT_IN_LFLOOR
):
6030 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
6031 target
= expand_builtin_int_roundingfn (exp
, target
, subtarget
);
6036 CASE_FLT_FN (BUILT_IN_LRINT
):
6037 CASE_FLT_FN (BUILT_IN_LLRINT
):
6038 CASE_FLT_FN (BUILT_IN_LROUND
):
6039 CASE_FLT_FN (BUILT_IN_LLROUND
):
6040 target
= expand_builtin_int_roundingfn_2 (exp
, target
, subtarget
);
6045 CASE_FLT_FN (BUILT_IN_POW
):
6046 target
= expand_builtin_pow (exp
, target
, subtarget
);
6051 CASE_FLT_FN (BUILT_IN_POWI
):
6052 target
= expand_builtin_powi (exp
, target
, subtarget
);
6057 CASE_FLT_FN (BUILT_IN_ATAN2
):
6058 CASE_FLT_FN (BUILT_IN_LDEXP
):
6059 CASE_FLT_FN (BUILT_IN_SCALB
):
6060 CASE_FLT_FN (BUILT_IN_SCALBN
):
6061 CASE_FLT_FN (BUILT_IN_SCALBLN
):
6062 if (! flag_unsafe_math_optimizations
)
6065 CASE_FLT_FN (BUILT_IN_FMOD
):
6066 CASE_FLT_FN (BUILT_IN_REMAINDER
):
6067 CASE_FLT_FN (BUILT_IN_DREM
):
6068 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
6073 CASE_FLT_FN (BUILT_IN_CEXPI
):
6074 target
= expand_builtin_cexpi (exp
, target
, subtarget
);
6075 gcc_assert (target
);
6078 CASE_FLT_FN (BUILT_IN_SIN
):
6079 CASE_FLT_FN (BUILT_IN_COS
):
6080 if (! flag_unsafe_math_optimizations
)
6082 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6087 CASE_FLT_FN (BUILT_IN_SINCOS
):
6088 if (! flag_unsafe_math_optimizations
)
6090 target
= expand_builtin_sincos (exp
);
    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();

      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
	 FUNCTION with a copy of the parameters described by
	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
	 allocated on the stack into which is stored all the registers
	 that might possibly be used for returning the result of a
	 function.  ARGUMENTS is the value returned by
	 __builtin_apply_args.  ARGSIZE is the number of bytes of
	 arguments that must be copied.  ??? How should this value be
	 computed?  We'll also need a safe worst case value for varargs
	 functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (exp, POINTER_TYPE,
			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
	  && !validate_arglist (exp, REFERENCE_TYPE,
				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
	return const0_rtx;
      else
	{
	  rtx ops[3];

	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));

	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
	}

      /* __builtin_return (RESULT) causes the function to return the
	 value described by RESULT.  RESULT is address of the block of
	 memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
      return const0_rtx;
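      /* Usage sketch for __builtin_apply_args, __builtin_apply and
	 __builtin_return (illustrative only, not part of the original
	 source; REAL_FN and the argument-block size 64 are made up):

	   void *args = __builtin_apply_args ();
	   void *res  = __builtin_apply ((void (*) ()) real_fn, args, 64);
	   __builtin_return (res);

	 forwards the current function's arguments to REAL_FN and returns
	 whatever it returned.  */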
6133 case BUILT_IN_SAVEREGS
:
6134 return expand_builtin_saveregs ();
6136 case BUILT_IN_ARGS_INFO
:
6137 return expand_builtin_args_info (exp
);
6139 /* Return the address of the first anonymous stack arg. */
6140 case BUILT_IN_NEXT_ARG
:
6141 if (fold_builtin_next_arg (exp
, false))
6143 return expand_builtin_next_arg ();
6145 case BUILT_IN_CLASSIFY_TYPE
:
6146 return expand_builtin_classify_type (exp
);
6148 case BUILT_IN_CONSTANT_P
:
6151 case BUILT_IN_FRAME_ADDRESS
:
6152 case BUILT_IN_RETURN_ADDRESS
:
6153 return expand_builtin_frame_address (fndecl
, exp
);
6155 /* Returns the address of the area where the structure is returned.
6157 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6158 if (call_expr_nargs (exp
) != 0
6159 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6160 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6163 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6165 case BUILT_IN_ALLOCA
:
6166 target
= expand_builtin_alloca (exp
, target
);
6171 case BUILT_IN_STACK_SAVE
:
6172 return expand_stack_save ();
6174 case BUILT_IN_STACK_RESTORE
:
6175 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6178 case BUILT_IN_BSWAP32
:
6179 case BUILT_IN_BSWAP64
:
6180 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6186 CASE_INT_FN (BUILT_IN_FFS
):
6187 case BUILT_IN_FFSIMAX
:
6188 target
= expand_builtin_unop (target_mode
, exp
, target
,
6189 subtarget
, ffs_optab
);
6194 CASE_INT_FN (BUILT_IN_CLZ
):
6195 case BUILT_IN_CLZIMAX
:
6196 target
= expand_builtin_unop (target_mode
, exp
, target
,
6197 subtarget
, clz_optab
);
6202 CASE_INT_FN (BUILT_IN_CTZ
):
6203 case BUILT_IN_CTZIMAX
:
6204 target
= expand_builtin_unop (target_mode
, exp
, target
,
6205 subtarget
, ctz_optab
);
6210 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6211 case BUILT_IN_POPCOUNTIMAX
:
6212 target
= expand_builtin_unop (target_mode
, exp
, target
,
6213 subtarget
, popcount_optab
);
6218 CASE_INT_FN (BUILT_IN_PARITY
):
6219 case BUILT_IN_PARITYIMAX
:
6220 target
= expand_builtin_unop (target_mode
, exp
, target
,
6221 subtarget
, parity_optab
);
6226 case BUILT_IN_STRLEN
:
6227 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6232 case BUILT_IN_STRCPY
:
6233 target
= expand_builtin_strcpy (fndecl
, exp
, target
, mode
);
6238 case BUILT_IN_STRNCPY
:
6239 target
= expand_builtin_strncpy (exp
, target
, mode
);
6244 case BUILT_IN_STPCPY
:
6245 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6250 case BUILT_IN_STRCAT
:
6251 target
= expand_builtin_strcat (fndecl
, exp
, target
, mode
);
6256 case BUILT_IN_STRNCAT
:
6257 target
= expand_builtin_strncat (exp
, target
, mode
);
6262 case BUILT_IN_STRSPN
:
6263 target
= expand_builtin_strspn (exp
, target
, mode
);
6268 case BUILT_IN_STRCSPN
:
6269 target
= expand_builtin_strcspn (exp
, target
, mode
);
6274 case BUILT_IN_STRSTR
:
6275 target
= expand_builtin_strstr (exp
, target
, mode
);
6280 case BUILT_IN_STRPBRK
:
6281 target
= expand_builtin_strpbrk (exp
, target
, mode
);
6286 case BUILT_IN_INDEX
:
6287 case BUILT_IN_STRCHR
:
6288 target
= expand_builtin_strchr (exp
, target
, mode
);
6293 case BUILT_IN_RINDEX
:
6294 case BUILT_IN_STRRCHR
:
6295 target
= expand_builtin_strrchr (exp
, target
, mode
);
6300 case BUILT_IN_MEMCPY
:
6301 target
= expand_builtin_memcpy (exp
, target
, mode
);
6306 case BUILT_IN_MEMPCPY
:
6307 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6312 case BUILT_IN_MEMMOVE
:
6313 target
= expand_builtin_memmove (exp
, target
, mode
, ignore
);
6318 case BUILT_IN_BCOPY
:
6319 target
= expand_builtin_bcopy (exp
, ignore
);
6324 case BUILT_IN_MEMSET
:
6325 target
= expand_builtin_memset (exp
, target
, mode
);
6330 case BUILT_IN_BZERO
:
6331 target
= expand_builtin_bzero (exp
);
6336 case BUILT_IN_STRCMP
:
6337 target
= expand_builtin_strcmp (exp
, target
, mode
);
6342 case BUILT_IN_STRNCMP
:
6343 target
= expand_builtin_strncmp (exp
, target
, mode
);
6349 case BUILT_IN_MEMCMP
:
6350 target
= expand_builtin_memcmp (exp
, target
, mode
);
6355 case BUILT_IN_SETJMP
:
6356 /* This should have been lowered to the builtins below. */
6359 case BUILT_IN_SETJMP_SETUP
:
6360 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6361 and the receiver label. */
6362 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6364 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6365 VOIDmode
, EXPAND_NORMAL
);
6366 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6367 rtx label_r
= label_rtx (label
);
6369 /* This is copied from the handling of non-local gotos. */
6370 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6371 nonlocal_goto_handler_labels
6372 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6373 nonlocal_goto_handler_labels
);
6374 /* ??? Do not let expand_label treat us as such since we would
6375 not want to be both on the list of non-local labels and on
6376 the list of forced labels. */
6377 FORCED_LABEL (label
) = 0;
6382 case BUILT_IN_SETJMP_DISPATCHER
:
6383 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6384 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6386 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6387 rtx label_r
= label_rtx (label
);
6389 /* Remove the dispatcher label from the list of non-local labels
6390 since the receiver labels have been added to it above. */
6391 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6396 case BUILT_IN_SETJMP_RECEIVER
:
6397 /* __builtin_setjmp_receiver is passed the receiver label. */
6398 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6400 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6401 rtx label_r
= label_rtx (label
);
6403 expand_builtin_setjmp_receiver (label_r
);
6408 /* __builtin_longjmp is passed a pointer to an array of five words.
6409 It's similar to the C library longjmp function but works with
6410 __builtin_setjmp above. */
6411 case BUILT_IN_LONGJMP
:
6412 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6414 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6415 VOIDmode
, EXPAND_NORMAL
);
6416 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6418 if (value
!= const1_rtx
)
6420 error ("%<__builtin_longjmp%> second argument must be 1");
6424 expand_builtin_longjmp (buf_addr
, value
);
6429 case BUILT_IN_NONLOCAL_GOTO
:
6430 target
= expand_builtin_nonlocal_goto (exp
);
6435 /* This updates the setjmp buffer that is its argument with the value
6436 of the current stack pointer. */
6437 case BUILT_IN_UPDATE_SETJMP_BUF
:
6438 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6441 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6443 expand_builtin_update_setjmp_buf (buf_addr
);
6449 expand_builtin_trap ();
6452 case BUILT_IN_PRINTF
:
6453 target
= expand_builtin_printf (exp
, target
, mode
, false);
6458 case BUILT_IN_PRINTF_UNLOCKED
:
6459 target
= expand_builtin_printf (exp
, target
, mode
, true);
6464 case BUILT_IN_FPUTS
:
6465 target
= expand_builtin_fputs (exp
, target
, false);
6469 case BUILT_IN_FPUTS_UNLOCKED
:
6470 target
= expand_builtin_fputs (exp
, target
, true);
6475 case BUILT_IN_FPRINTF
:
6476 target
= expand_builtin_fprintf (exp
, target
, mode
, false);
6481 case BUILT_IN_FPRINTF_UNLOCKED
:
6482 target
= expand_builtin_fprintf (exp
, target
, mode
, true);
6487 case BUILT_IN_SPRINTF
:
6488 target
= expand_builtin_sprintf (exp
, target
, mode
);
6493 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6494 case BUILT_IN_SIGNBITD32
:
6495 case BUILT_IN_SIGNBITD64
:
6496 case BUILT_IN_SIGNBITD128
:
6497 target
= expand_builtin_signbit (exp
, target
);
6502 /* Various hooks for the DWARF 2 __throw routine. */
6503 case BUILT_IN_UNWIND_INIT
:
6504 expand_builtin_unwind_init ();
6506 case BUILT_IN_DWARF_CFA
:
6507 return virtual_cfa_rtx
;
6508 #ifdef DWARF2_UNWIND_INFO
6509 case BUILT_IN_DWARF_SP_COLUMN
:
6510 return expand_builtin_dwarf_sp_column ();
6511 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6512 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6515 case BUILT_IN_FROB_RETURN_ADDR
:
6516 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6517 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6518 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6519 case BUILT_IN_EH_RETURN
:
6520 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6521 CALL_EXPR_ARG (exp
, 1));
6523 #ifdef EH_RETURN_DATA_REGNO
6524 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6525 return expand_builtin_eh_return_data_regno (exp
);
6527 case BUILT_IN_EXTEND_POINTER
:
6528 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6530 case BUILT_IN_VA_START
:
6531 case BUILT_IN_STDARG_START
:
6532 return expand_builtin_va_start (exp
);
6533 case BUILT_IN_VA_END
:
6534 return expand_builtin_va_end (exp
);
6535 case BUILT_IN_VA_COPY
:
6536 return expand_builtin_va_copy (exp
);
6537 case BUILT_IN_EXPECT
:
6538 return expand_builtin_expect (exp
, target
);
6539 case BUILT_IN_PREFETCH
:
6540 expand_builtin_prefetch (exp
);
6543 case BUILT_IN_PROFILE_FUNC_ENTER
:
6544 return expand_builtin_profile_func (false);
6545 case BUILT_IN_PROFILE_FUNC_EXIT
:
6546 return expand_builtin_profile_func (true);
6548 case BUILT_IN_INIT_TRAMPOLINE
:
6549 return expand_builtin_init_trampoline (exp
);
6550 case BUILT_IN_ADJUST_TRAMPOLINE
:
6551 return expand_builtin_adjust_trampoline (exp
);
6554 case BUILT_IN_EXECL
:
6555 case BUILT_IN_EXECV
:
6556 case BUILT_IN_EXECLP
:
6557 case BUILT_IN_EXECLE
:
6558 case BUILT_IN_EXECVP
:
6559 case BUILT_IN_EXECVE
:
6560 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6565 case BUILT_IN_FETCH_AND_ADD_1
:
6566 case BUILT_IN_FETCH_AND_ADD_2
:
6567 case BUILT_IN_FETCH_AND_ADD_4
:
6568 case BUILT_IN_FETCH_AND_ADD_8
:
6569 case BUILT_IN_FETCH_AND_ADD_16
:
6570 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6571 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6572 false, target
, ignore
);
6577 case BUILT_IN_FETCH_AND_SUB_1
:
6578 case BUILT_IN_FETCH_AND_SUB_2
:
6579 case BUILT_IN_FETCH_AND_SUB_4
:
6580 case BUILT_IN_FETCH_AND_SUB_8
:
6581 case BUILT_IN_FETCH_AND_SUB_16
:
6582 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6583 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6584 false, target
, ignore
);
6589 case BUILT_IN_FETCH_AND_OR_1
:
6590 case BUILT_IN_FETCH_AND_OR_2
:
6591 case BUILT_IN_FETCH_AND_OR_4
:
6592 case BUILT_IN_FETCH_AND_OR_8
:
6593 case BUILT_IN_FETCH_AND_OR_16
:
6594 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6595 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6596 false, target
, ignore
);
6601 case BUILT_IN_FETCH_AND_AND_1
:
6602 case BUILT_IN_FETCH_AND_AND_2
:
6603 case BUILT_IN_FETCH_AND_AND_4
:
6604 case BUILT_IN_FETCH_AND_AND_8
:
6605 case BUILT_IN_FETCH_AND_AND_16
:
6606 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6607 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6608 false, target
, ignore
);
6613 case BUILT_IN_FETCH_AND_XOR_1
:
6614 case BUILT_IN_FETCH_AND_XOR_2
:
6615 case BUILT_IN_FETCH_AND_XOR_4
:
6616 case BUILT_IN_FETCH_AND_XOR_8
:
6617 case BUILT_IN_FETCH_AND_XOR_16
:
6618 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6619 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6620 false, target
, ignore
);
6625 case BUILT_IN_FETCH_AND_NAND_1
:
6626 case BUILT_IN_FETCH_AND_NAND_2
:
6627 case BUILT_IN_FETCH_AND_NAND_4
:
6628 case BUILT_IN_FETCH_AND_NAND_8
:
6629 case BUILT_IN_FETCH_AND_NAND_16
:
6630 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6631 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6632 false, target
, ignore
);
6637 case BUILT_IN_ADD_AND_FETCH_1
:
6638 case BUILT_IN_ADD_AND_FETCH_2
:
6639 case BUILT_IN_ADD_AND_FETCH_4
:
6640 case BUILT_IN_ADD_AND_FETCH_8
:
6641 case BUILT_IN_ADD_AND_FETCH_16
:
6642 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6643 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6644 true, target
, ignore
);
6649 case BUILT_IN_SUB_AND_FETCH_1
:
6650 case BUILT_IN_SUB_AND_FETCH_2
:
6651 case BUILT_IN_SUB_AND_FETCH_4
:
6652 case BUILT_IN_SUB_AND_FETCH_8
:
6653 case BUILT_IN_SUB_AND_FETCH_16
:
6654 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6655 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6656 true, target
, ignore
);
6661 case BUILT_IN_OR_AND_FETCH_1
:
6662 case BUILT_IN_OR_AND_FETCH_2
:
6663 case BUILT_IN_OR_AND_FETCH_4
:
6664 case BUILT_IN_OR_AND_FETCH_8
:
6665 case BUILT_IN_OR_AND_FETCH_16
:
6666 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6667 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6668 true, target
, ignore
);
6673 case BUILT_IN_AND_AND_FETCH_1
:
6674 case BUILT_IN_AND_AND_FETCH_2
:
6675 case BUILT_IN_AND_AND_FETCH_4
:
6676 case BUILT_IN_AND_AND_FETCH_8
:
6677 case BUILT_IN_AND_AND_FETCH_16
:
6678 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6679 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6680 true, target
, ignore
);
6685 case BUILT_IN_XOR_AND_FETCH_1
:
6686 case BUILT_IN_XOR_AND_FETCH_2
:
6687 case BUILT_IN_XOR_AND_FETCH_4
:
6688 case BUILT_IN_XOR_AND_FETCH_8
:
6689 case BUILT_IN_XOR_AND_FETCH_16
:
6690 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
6691 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6692 true, target
, ignore
);
6697 case BUILT_IN_NAND_AND_FETCH_1
:
6698 case BUILT_IN_NAND_AND_FETCH_2
:
6699 case BUILT_IN_NAND_AND_FETCH_4
:
6700 case BUILT_IN_NAND_AND_FETCH_8
:
6701 case BUILT_IN_NAND_AND_FETCH_16
:
6702 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
6703 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6704 true, target
, ignore
);
6709 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
6710 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
6711 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
6712 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
6713 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
6714 if (mode
== VOIDmode
)
6715 mode
= TYPE_MODE (boolean_type_node
);
6716 if (!target
|| !register_operand (target
, mode
))
6717 target
= gen_reg_rtx (mode
);
6719 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
6720 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6725 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
6726 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
6727 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
6728 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
6729 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
6730 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
6731 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6736 case BUILT_IN_LOCK_TEST_AND_SET_1
:
6737 case BUILT_IN_LOCK_TEST_AND_SET_2
:
6738 case BUILT_IN_LOCK_TEST_AND_SET_4
:
6739 case BUILT_IN_LOCK_TEST_AND_SET_8
:
6740 case BUILT_IN_LOCK_TEST_AND_SET_16
:
6741 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
6742 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
6747 case BUILT_IN_LOCK_RELEASE_1
:
6748 case BUILT_IN_LOCK_RELEASE_2
:
6749 case BUILT_IN_LOCK_RELEASE_4
:
6750 case BUILT_IN_LOCK_RELEASE_8
:
6751 case BUILT_IN_LOCK_RELEASE_16
:
6752 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
6753 expand_builtin_lock_release (mode
, exp
);
6756 case BUILT_IN_SYNCHRONIZE
:
6757 expand_builtin_synchronize ();
6760 case BUILT_IN_OBJECT_SIZE
:
6761 return expand_builtin_object_size (exp
);
6763 case BUILT_IN_MEMCPY_CHK
:
6764 case BUILT_IN_MEMPCPY_CHK
:
6765 case BUILT_IN_MEMMOVE_CHK
:
6766 case BUILT_IN_MEMSET_CHK
:
6767 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6772 case BUILT_IN_STRCPY_CHK
:
6773 case BUILT_IN_STPCPY_CHK
:
6774 case BUILT_IN_STRNCPY_CHK
:
6775 case BUILT_IN_STRCAT_CHK
:
6776 case BUILT_IN_STRNCAT_CHK
:
6777 case BUILT_IN_SNPRINTF_CHK
:
6778 case BUILT_IN_VSNPRINTF_CHK
:
6779 maybe_emit_chk_warning (exp
, fcode
);
6782 case BUILT_IN_SPRINTF_CHK
:
6783 case BUILT_IN_VSPRINTF_CHK
:
6784 maybe_emit_sprintf_chk_warning (exp
, fcode
);
    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (tree t)
{
  tree fndecl, arg, parmlist;
  tree argtype, parmtype;
  call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  if (more_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      if (! more_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
	  && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
	  || (TREE_CODE (op) == ARRAY_REF
	      && integer_zerop (TREE_OPERAND (op, 1))
	      && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
	return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer)
    return integer_zero_node;

  return NULL_TREE;
}
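/* Folding sketch (illustrative, not from the original source):

     __builtin_constant_p (3 * 14)  -> 1   (folds to a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")   -> 1   (ADDR_EXPR of a STRING_CST)

   A non-constant argument is left alone (NULL_TREE) so later passes can
   still prove it constant, and only becomes 0 once no further folding
   is possible, e.g. when folding an initializer.  */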
/* Fold a call to __builtin_expect with argument ARG, if we expect that a
   comparison against the argument will fold to a constant.  In practice,
   this means a true constant or the address of a non-weak symbol.  */

static tree
fold_builtin_expect (tree arg)
{
  tree inner;

  /* If the argument isn't invariant, then there's nothing we can do.  */
  if (!TREE_INVARIANT (arg))
    return NULL_TREE;

  /* If we're looking at an address of a weak decl, then do not fold.  */
  inner = arg;
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG already has the proper type for the return value.  */
  return arg;
}
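/* Usage sketch (illustrative, not from the original source; the flag
   name is made up): in

     if (__builtin_expect (some_rare_flag, 0)) ...

   once SOME_RARE_FLAG has been propagated to a constant, the call
   collapses to its first argument here so the enclosing condition can
   fold; the address of a weak symbol is excluded because it may still
   turn out to be null at link time.  */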
6943 /* Fold a call to __builtin_classify_type with argument ARG. */
6946 fold_builtin_classify_type (tree arg
)
6949 return build_int_cst (NULL_TREE
, no_type_class
);
6951 return build_int_cst (NULL_TREE
, type_to_class (TREE_TYPE (arg
)));
6954 /* Fold a call to __builtin_strlen with argument ARG. */
6957 fold_builtin_strlen (tree arg
)
6959 if (!validate_arg (arg
, POINTER_TYPE
))
6963 tree len
= c_strlen (arg
, 0);
6967 /* Convert from the internal "sizetype" type to "size_t". */
6969 len
= fold_convert (size_type_node
, len
);
6977 /* Fold a call to __builtin_inf or __builtin_huge_val. */
6980 fold_builtin_inf (tree type
, int warn
)
6982 REAL_VALUE_TYPE real
;
6984 /* __builtin_inff is intended to be usable to define INFINITY on all
6985 targets. If an infinity is not available, INFINITY expands "to a
6986 positive constant of type float that overflows at translation
6987 time", footnote "In this case, using INFINITY will violate the
6988 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
6989 Thus we pedwarn to ensure this constraint violation is
6991 if (!MODE_HAS_INFINITIES (TYPE_MODE (type
)) && warn
)
6992 pedwarn ("target format does not support infinity");
6995 return build_real (type
, real
);
6998 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7001 fold_builtin_nan (tree arg
, tree type
, int quiet
)
7003 REAL_VALUE_TYPE real
;
7006 if (!validate_arg (arg
, POINTER_TYPE
))
7008 str
= c_getstr (arg
);
7012 if (!real_nan (&real
, str
, quiet
, TYPE_MODE (type
)))
7015 return build_real (type
, real
);
7018 /* Return true if the floating point expression T has an integer value.
7019 We also allow +Inf, -Inf and NaN to be considered integer values. */
7022 integer_valued_real_p (tree t
)
7024 switch (TREE_CODE (t
))
7031 case NON_LVALUE_EXPR
:
7032 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7037 return integer_valued_real_p (GENERIC_TREE_OPERAND (t
, 1));
7044 return integer_valued_real_p (TREE_OPERAND (t
, 0))
7045 && integer_valued_real_p (TREE_OPERAND (t
, 1));
7048 return integer_valued_real_p (TREE_OPERAND (t
, 1))
7049 && integer_valued_real_p (TREE_OPERAND (t
, 2));
7052 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
7056 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
7057 if (TREE_CODE (type
) == INTEGER_TYPE
)
7059 if (TREE_CODE (type
) == REAL_TYPE
)
7060 return integer_valued_real_p (TREE_OPERAND (t
, 0));
7065 switch (builtin_mathfn_code (t
))
7067 CASE_FLT_FN (BUILT_IN_CEIL
):
7068 CASE_FLT_FN (BUILT_IN_FLOOR
):
7069 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
7070 CASE_FLT_FN (BUILT_IN_RINT
):
7071 CASE_FLT_FN (BUILT_IN_ROUND
):
7072 CASE_FLT_FN (BUILT_IN_TRUNC
):
7075 CASE_FLT_FN (BUILT_IN_FMIN
):
7076 CASE_FLT_FN (BUILT_IN_FMAX
):
7077 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
7078 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7091 /* FNDECL is assumed to be a builtin where truncation can be propagated
7092 across (for instance floor((double)f) == (double)floorf (f).
7093 Do the transformation for a call with argument ARG. */
7096 fold_trunc_transparent_mathfn (tree fndecl
, tree arg
)
7098 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7100 if (!validate_arg (arg
, REAL_TYPE
))
7103 /* Integer rounding functions are idempotent. */
7104 if (fcode
== builtin_mathfn_code (arg
))
7107 /* If argument is already integer valued, and we don't need to worry
7108 about setting errno, there's no need to perform rounding. */
7109 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7114 tree arg0
= strip_float_extensions (arg
);
7115 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7116 tree newtype
= TREE_TYPE (arg0
);
7119 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7120 && (decl
= mathfn_built_in (newtype
, fcode
)))
7121 return fold_convert (ftype
,
7122 build_call_expr (decl
, 1,
7123 fold_convert (newtype
, arg0
)));
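/* Transformation sketch (illustrative, not from the original source)
   for the truncation-transparent case handled above: with a float F,

     double d = floor ((double) f);

   can be computed as

     double d = (double) floorf (f);

   since rounding F to an integer in float loses nothing that the
   double computation would have kept.  */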
7128 /* FNDECL is assumed to be builtin which can narrow the FP type of
7129 the argument, for instance lround((double)f) -> lroundf (f).
7130 Do the transformation for a call with argument ARG. */
7133 fold_fixed_mathfn (tree fndecl
, tree arg
)
7135 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7137 if (!validate_arg (arg
, REAL_TYPE
))
7140 /* If argument is already integer valued, and we don't need to worry
7141 about setting errno, there's no need to perform rounding. */
7142 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7143 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7147 tree ftype
= TREE_TYPE (arg
);
7148 tree arg0
= strip_float_extensions (arg
);
7149 tree newtype
= TREE_TYPE (arg0
);
7152 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7153 && (decl
= mathfn_built_in (newtype
, fcode
)))
7154 return build_call_expr (decl
, 1, fold_convert (newtype
, arg0
));
7157 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7158 sizeof (long long) == sizeof (long). */
7159 if (TYPE_PRECISION (long_long_integer_type_node
)
7160 == TYPE_PRECISION (long_integer_type_node
))
7162 tree newfn
= NULL_TREE
;
7165 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7166 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7169 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7170 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7173 CASE_FLT_FN (BUILT_IN_LLROUND
):
7174 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7177 CASE_FLT_FN (BUILT_IN_LLRINT
):
7178 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7187 tree newcall
= build_call_expr(newfn
, 1, arg
);
7188 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7195 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7196 return type. Return NULL_TREE if no simplification can be made. */
7199 fold_builtin_cabs (tree arg
, tree type
, tree fndecl
)
7203 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
7204 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7207 /* Calculate the result when the argument is a constant. */
7208 if (TREE_CODE (arg
) == COMPLEX_CST
7209 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7213 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7215 tree real
= TREE_OPERAND (arg
, 0);
7216 tree imag
= TREE_OPERAND (arg
, 1);
7218 /* If either part is zero, cabs is fabs of the other. */
7219 if (real_zerop (real
))
7220 return fold_build1 (ABS_EXPR
, type
, imag
);
7221 if (real_zerop (imag
))
7222 return fold_build1 (ABS_EXPR
, type
, real
);
7224 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7225 if (flag_unsafe_math_optimizations
7226 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7228 const REAL_VALUE_TYPE sqrt2_trunc
7229 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
7231 return fold_build2 (MULT_EXPR
, type
,
7232 fold_build1 (ABS_EXPR
, type
, real
),
7233 build_real (type
, sqrt2_trunc
));
7237 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7238 if (TREE_CODE (arg
) == NEGATE_EXPR
7239 || TREE_CODE (arg
) == CONJ_EXPR
)
7240 return build_call_expr (fndecl
, 1, TREE_OPERAND (arg
, 0));
7242 /* Don't do this when optimizing for size. */
7243 if (flag_unsafe_math_optimizations
7244 && optimize
&& !optimize_size
)
7246 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7248 if (sqrtfn
!= NULL_TREE
)
7250 tree rpart
, ipart
, result
;
7252 arg
= builtin_save_expr (arg
);
7254 rpart
= fold_build1 (REALPART_EXPR
, type
, arg
);
7255 ipart
= fold_build1 (IMAGPART_EXPR
, type
, arg
);
7257 rpart
= builtin_save_expr (rpart
);
7258 ipart
= builtin_save_expr (ipart
);
7260 result
= fold_build2 (PLUS_EXPR
, type
,
7261 fold_build2 (MULT_EXPR
, type
,
7263 fold_build2 (MULT_EXPR
, type
,
7266 return build_call_expr (sqrtfn
, 1, result
);
7273 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7274 Return NULL_TREE if no simplification can be made. */
7277 fold_builtin_sqrt (tree arg
, tree type
)
7280 enum built_in_function fcode
;
7283 if (!validate_arg (arg
, REAL_TYPE
))
7286 /* Calculate the result when the argument is a constant. */
7287 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7290 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7291 fcode
= builtin_mathfn_code (arg
);
7292 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7294 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7295 arg
= fold_build2 (MULT_EXPR
, type
,
7296 CALL_EXPR_ARG (arg
, 0),
7297 build_real (type
, dconsthalf
));
7298 return build_call_expr (expfn
, 1, arg
);
7301 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7302 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7304 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7308 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7310 /* The inner root was either sqrt or cbrt. */
7311 REAL_VALUE_TYPE dconstroot
=
7312 BUILTIN_SQRT_P (fcode
) ? dconsthalf
: dconstthird
;
7314 /* Adjust for the outer root. */
7315 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7316 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7317 tree_root
= build_real (type
, dconstroot
);
7318 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7322 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7323 if (flag_unsafe_math_optimizations
7324 && (fcode
== BUILT_IN_POW
7325 || fcode
== BUILT_IN_POWF
7326 || fcode
== BUILT_IN_POWL
))
7328 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7329 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7330 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7332 if (!tree_expr_nonnegative_p (arg0
))
7333 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7334 narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
7335 build_real (type
, dconsthalf
));
7336 return build_call_expr (powfn
, 2, arg0
, narg1
);
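/* Transformation sketches for the cases above (illustrative, not from
   the original source), all guarded by flag_unsafe_math_optimizations:

     sqrt (exp (x))     -> exp (x * 0.5)
     sqrt (cbrt (x))    -> pow (x, 1.0 / 6.0)
     sqrt (pow (x, y))  -> pow (fabs (x), y * 0.5)  */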
7342 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7343 Return NULL_TREE if no simplification can be made. */
7346 fold_builtin_cbrt (tree arg
, tree type
)
7348 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7351 if (!validate_arg (arg
, REAL_TYPE
))
7354 /* Calculate the result when the argument is a constant. */
7355 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7358 if (flag_unsafe_math_optimizations
)
7360 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7361 if (BUILTIN_EXPONENT_P (fcode
))
7363 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7364 const REAL_VALUE_TYPE third_trunc
=
7365 real_value_truncate (TYPE_MODE (type
), dconstthird
);
7366 arg
= fold_build2 (MULT_EXPR
, type
,
7367 CALL_EXPR_ARG (arg
, 0),
7368 build_real (type
, third_trunc
));
7369 return build_call_expr (expfn
, 1, arg
);
7372 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7373 if (BUILTIN_SQRT_P (fcode
))
7375 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7379 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7381 REAL_VALUE_TYPE dconstroot
= dconstthird
;
7383 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7384 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7385 tree_root
= build_real (type
, dconstroot
);
7386 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7390 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7391 if (BUILTIN_CBRT_P (fcode
))
7393 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7394 if (tree_expr_nonnegative_p (arg0
))
7396 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7401 REAL_VALUE_TYPE dconstroot
;
7403 real_arithmetic (&dconstroot
, MULT_EXPR
, &dconstthird
, &dconstthird
);
7404 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7405 tree_root
= build_real (type
, dconstroot
);
7406 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7411 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7412 if (fcode
== BUILT_IN_POW
7413 || fcode
== BUILT_IN_POWF
7414 || fcode
== BUILT_IN_POWL
)
7416 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7417 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7418 if (tree_expr_nonnegative_p (arg00
))
7420 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7421 const REAL_VALUE_TYPE dconstroot
7422 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
7423 tree narg01
= fold_build2 (MULT_EXPR
, type
, arg01
,
7424 build_real (type
, dconstroot
));
7425 return build_call_expr (powfn
, 2, arg00
, narg01
);
7432 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7433 TYPE is the type of the return value. Return NULL_TREE if no
7434 simplification can be made. */
7437 fold_builtin_cos (tree arg
, tree type
, tree fndecl
)
7441 if (!validate_arg (arg
, REAL_TYPE
))
7444 /* Calculate the result when the argument is a constant. */
7445 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cos
, NULL
, NULL
, 0)))
7448 /* Optimize cos(-x) into cos (x). */
7449 if ((narg
= fold_strip_sign_ops (arg
)))
7450 return build_call_expr (fndecl
, 1, narg
);
7455 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7456 Return NULL_TREE if no simplification can be made. */
7459 fold_builtin_cosh (tree arg
, tree type
, tree fndecl
)
7461 if (validate_arg (arg
, REAL_TYPE
))
7465 /* Calculate the result when the argument is a constant. */
7466 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cosh
, NULL
, NULL
, 0)))
7469 /* Optimize cosh(-x) into cosh (x). */
7470 if ((narg
= fold_strip_sign_ops (arg
)))
7471 return build_call_expr (fndecl
, 1, narg
);
7477 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7478 Return NULL_TREE if no simplification can be made. */
7481 fold_builtin_tan (tree arg
, tree type
)
7483 enum built_in_function fcode
;
7486 if (!validate_arg (arg
, REAL_TYPE
))
7489 /* Calculate the result when the argument is a constant. */
7490 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7493 /* Optimize tan(atan(x)) = x. */
7494 fcode
= builtin_mathfn_code (arg
);
7495 if (flag_unsafe_math_optimizations
7496 && (fcode
== BUILT_IN_ATAN
7497 || fcode
== BUILT_IN_ATANF
7498 || fcode
== BUILT_IN_ATANL
))
7499 return CALL_EXPR_ARG (arg
, 0);
7504 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7505 NULL_TREE if no simplification can be made. */
7508 fold_builtin_sincos (tree arg0
, tree arg1
, tree arg2
)
7513 if (!validate_arg (arg0
, REAL_TYPE
)
7514 || !validate_arg (arg1
, POINTER_TYPE
)
7515 || !validate_arg (arg2
, POINTER_TYPE
))
7518 type
= TREE_TYPE (arg0
);
7520 /* Calculate the result when the argument is a constant. */
7521 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7524 /* Canonicalize sincos to cexpi. */
7525 if (!TARGET_C99_FUNCTIONS
)
7527 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7531 call
= build_call_expr (fn
, 1, arg0
);
7532 call
= builtin_save_expr (call
);
7534 return build2 (COMPOUND_EXPR
, type
,
7535 build2 (MODIFY_EXPR
, void_type_node
,
7536 build_fold_indirect_ref (arg1
),
7537 build1 (IMAGPART_EXPR
, type
, call
)),
7538 build2 (MODIFY_EXPR
, void_type_node
,
7539 build_fold_indirect_ref (arg2
),
7540 build1 (REALPART_EXPR
, type
, call
)));
7543 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7544 NULL_TREE if no simplification can be made. */
7547 fold_builtin_cexp (tree arg0
, tree type
)
7550 tree realp
, imagp
, ifn
;
7552 if (!validate_arg (arg0
, COMPLEX_TYPE
))
7555 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7557 /* In case we can figure out the real part of arg0 and it is constant zero
7559 if (!TARGET_C99_FUNCTIONS
)
7561 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7565 if ((realp
= fold_unary (REALPART_EXPR
, rtype
, arg0
))
7566 && real_zerop (realp
))
7568 tree narg
= fold_build1 (IMAGPART_EXPR
, rtype
, arg0
);
7569 return build_call_expr (ifn
, 1, narg
);
7572 /* In case we can easily decompose real and imaginary parts split cexp
7573 to exp (r) * cexpi (i). */
7574 if (flag_unsafe_math_optimizations
7577 tree rfn
, rcall
, icall
;
7579 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7583 imagp
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
7587 icall
= build_call_expr (ifn
, 1, imagp
);
7588 icall
= builtin_save_expr (icall
);
7589 rcall
= build_call_expr (rfn
, 1, realp
);
7590 rcall
= builtin_save_expr (rcall
);
7591 return build2 (COMPLEX_EXPR
, type
,
7592 build2 (MULT_EXPR
, rtype
,
7594 build1 (REALPART_EXPR
, rtype
, icall
)),
7595 build2 (MULT_EXPR
, rtype
,
7597 build1 (IMAGPART_EXPR
, rtype
, icall
)));
7603 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7604 Return NULL_TREE if no simplification can be made. */
7607 fold_builtin_trunc (tree fndecl
, tree arg
)
7609 if (!validate_arg (arg
, REAL_TYPE
))
7612 /* Optimize trunc of constant value. */
7613 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7615 REAL_VALUE_TYPE r
, x
;
7616 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7618 x
= TREE_REAL_CST (arg
);
7619 real_trunc (&r
, TYPE_MODE (type
), &x
);
7620 return build_real (type
, r
);
7623 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7626 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7627 Return NULL_TREE if no simplification can be made. */
7630 fold_builtin_floor (tree fndecl
, tree arg
)
7632 if (!validate_arg (arg
, REAL_TYPE
))
7635 /* Optimize floor of constant value. */
7636 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7640 x
= TREE_REAL_CST (arg
);
7641 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7643 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7646 real_floor (&r
, TYPE_MODE (type
), &x
);
7647 return build_real (type
, r
);
7651 /* Fold floor (x) where x is nonnegative to trunc (x). */
7652 if (tree_expr_nonnegative_p (arg
))
7654 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7656 return build_call_expr (truncfn
, 1, arg
);
7659 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7662 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7663 Return NULL_TREE if no simplification can be made. */
7666 fold_builtin_ceil (tree fndecl
, tree arg
)
7668 if (!validate_arg (arg
, REAL_TYPE
))
7671 /* Optimize ceil of constant value. */
7672 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7676 x
= TREE_REAL_CST (arg
);
7677 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7679 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7682 real_ceil (&r
, TYPE_MODE (type
), &x
);
7683 return build_real (type
, r
);
7687 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7690 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7691 Return NULL_TREE if no simplification can be made. */
7694 fold_builtin_round (tree fndecl
, tree arg
)
7696 if (!validate_arg (arg
, REAL_TYPE
))
7699 /* Optimize round of constant value. */
7700 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7704 x
= TREE_REAL_CST (arg
);
7705 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7707 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7710 real_round (&r
, TYPE_MODE (type
), &x
);
7711 return build_real (type
, r
);
7715 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7718 /* Fold function call to builtin lround, lroundf or lroundl (or the
7719 corresponding long long versions) and other rounding functions. ARG
7720 is the argument to the call. Return NULL_TREE if no simplification
7724 fold_builtin_int_roundingfn (tree fndecl
, tree arg
)
7726 if (!validate_arg (arg
, REAL_TYPE
))
7729 /* Optimize lround of constant value. */
7730 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7732 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7734 if (! REAL_VALUE_ISNAN (x
) && ! REAL_VALUE_ISINF (x
))
7736 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7737 tree ftype
= TREE_TYPE (arg
);
7738 unsigned HOST_WIDE_INT lo2
;
7739 HOST_WIDE_INT hi
, lo
;
7742 switch (DECL_FUNCTION_CODE (fndecl
))
7744 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7745 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7746 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7749 CASE_FLT_FN (BUILT_IN_LCEIL
):
7750 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7751 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7754 CASE_FLT_FN (BUILT_IN_LROUND
):
7755 CASE_FLT_FN (BUILT_IN_LLROUND
):
7756 real_round (&r
, TYPE_MODE (ftype
), &x
);
7763 REAL_VALUE_TO_INT (&lo
, &hi
, r
);
7764 if (!fit_double_type (lo
, hi
, &lo2
, &hi
, itype
))
7765 return build_int_cst_wide (itype
, lo2
, hi
);
7769 switch (DECL_FUNCTION_CODE (fndecl
))
7771 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7772 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7773 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7774 if (tree_expr_nonnegative_p (arg
))
7775 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)),
7781 return fold_fixed_mathfn (fndecl
, arg
);
7784 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7785 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7786 the argument to the call. Return NULL_TREE if no simplification can
7790 fold_builtin_bitop (tree fndecl
, tree arg
)
7792 if (!validate_arg (arg
, INTEGER_TYPE
))
7795 /* Optimize for constant argument. */
7796 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7798 HOST_WIDE_INT hi
, width
, result
;
7799 unsigned HOST_WIDE_INT lo
;
7802 type
= TREE_TYPE (arg
);
7803 width
= TYPE_PRECISION (type
);
7804 lo
= TREE_INT_CST_LOW (arg
);
7806 /* Clear all the bits that are beyond the type's precision. */
7807 if (width
> HOST_BITS_PER_WIDE_INT
)
7809 hi
= TREE_INT_CST_HIGH (arg
);
7810 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
7811 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
7816 if (width
< HOST_BITS_PER_WIDE_INT
)
7817 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
7820 switch (DECL_FUNCTION_CODE (fndecl
))
7822 CASE_INT_FN (BUILT_IN_FFS
):
7824 result
= exact_log2 (lo
& -lo
) + 1;
7826 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
) + 1;
7831 CASE_INT_FN (BUILT_IN_CLZ
):
7833 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
7835 result
= width
- floor_log2 (lo
) - 1;
7836 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7840 CASE_INT_FN (BUILT_IN_CTZ
):
7842 result
= exact_log2 (lo
& -lo
);
7844 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
);
7845 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7849 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7852 result
++, lo
&= lo
- 1;
7854 result
++, hi
&= hi
- 1;
7857 CASE_INT_FN (BUILT_IN_PARITY
):
7860 result
++, lo
&= lo
- 1;
7862 result
++, hi
&= hi
- 1;
7870 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
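/* Constant-folding sketch for the cases above (illustrative, not from
   the original source):

     __builtin_popcount (0xF0F0) -> 8
     __builtin_parity (7)        -> 1
     __builtin_ffs (0x40)        -> 7
     __builtin_ctz (8)           -> 3

   each computed at compile time when the argument is an INTEGER_CST
   without overflow.  */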
7876 /* Fold function call to builtin_bswap and the long and long long
7877 variants. Return NULL_TREE if no simplification can be made. */
7879 fold_builtin_bswap (tree fndecl
, tree arg
)
7881 if (! validate_arg (arg
, INTEGER_TYPE
))
7884 /* Optimize constant value. */
7885 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7887 HOST_WIDE_INT hi
, width
, r_hi
= 0;
7888 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
7891 type
= TREE_TYPE (arg
);
7892 width
= TYPE_PRECISION (type
);
7893 lo
= TREE_INT_CST_LOW (arg
);
7894 hi
= TREE_INT_CST_HIGH (arg
);
7896 switch (DECL_FUNCTION_CODE (fndecl
))
7898 case BUILT_IN_BSWAP32
:
7899 case BUILT_IN_BSWAP64
:
7903 for (s
= 0; s
< width
; s
+= 8)
7905 int d
= width
- s
- 8;
7906 unsigned HOST_WIDE_INT byte
;
7908 if (s
< HOST_BITS_PER_WIDE_INT
)
7909 byte
= (lo
>> s
) & 0xff;
7911 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
7913 if (d
< HOST_BITS_PER_WIDE_INT
)
7916 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
7926 if (width
< HOST_BITS_PER_WIDE_INT
)
7927 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
7929 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
7935 /* Return true if EXPR is the real constant contained in VALUE. */
7938 real_dconstp (tree expr
, const REAL_VALUE_TYPE
*value
)
7942 return ((TREE_CODE (expr
) == REAL_CST
7943 && !TREE_OVERFLOW (expr
)
7944 && REAL_VALUES_EQUAL (TREE_REAL_CST (expr
), *value
))
7945 || (TREE_CODE (expr
) == COMPLEX_CST
7946 && real_dconstp (TREE_REALPART (expr
), value
)
7947 && real_zerop (TREE_IMAGPART (expr
))));
/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR logarithm function.  */

static tree
fold_builtin_logarithm (tree fndecl, tree arg,
			int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Optimize log(e) = 1.0.  We're never passed an exact 'e',
	 instead we'll look for 'e' truncated to MODE.  So only do
	 this if flag_unsafe_math_optimizations is set.  */
      if (flag_unsafe_math_optimizations && func == mpfr_log)
	{
	  const REAL_VALUE_TYPE e_truncated =
	    real_value_truncate (TYPE_MODE (type), dconste);
	  if (real_dconstp (arg, &e_truncated))
	    return build_real (type, dconst1);
	}

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
	return res;

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
	  && ((func == mpfr_log
	       && (fcode == BUILT_IN_EXP
		   || fcode == BUILT_IN_EXPF
		   || fcode == BUILT_IN_EXPL))
	      || (func == mpfr_log2
		  && (fcode == BUILT_IN_EXP2
		      || fcode == BUILT_IN_EXP2F
		      || fcode == BUILT_IN_EXP2L))
	      || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
	return fold_convert (type, CALL_EXPR_ARG (arg, 0));

      /* Optimize logN(func()) for various exponential functions.  We
	 want to determine the value "x" and the power "exponent" in
	 order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  tree exponent = 0, x = 0;

	  switch (fcode)
	    {
	    CASE_FLT_FN (BUILT_IN_EXP):
	      /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
	      x = build_real (type,
			      real_value_truncate (TYPE_MODE (type), dconste));
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP2):
	      /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
	      x = build_real (type, dconst2);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
	      x = build_real (type, dconst10);
	      exponent = CALL_EXPR_ARG (arg, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, dconsthalf);
	      break;
	    CASE_FLT_FN (BUILT_IN_CBRT):
	      /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
								dconstthird));
	      break;
	    CASE_FLT_FN (BUILT_IN_POW):
	      /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
	      x = CALL_EXPR_ARG (arg, 0);
	      exponent = CALL_EXPR_ARG (arg, 1);
	      break;
	    default:
	      break;
	    }

	  /* Now perform the optimization.  */
	  if (x)
	    {
	      tree logfn = build_call_expr (fndecl, 1, x);
	      return fold_build2 (MULT_EXPR, type, exponent, logfn);
	    }
	}
    }

  return NULL_TREE;
}
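/* A minimal source-level sketch of what the folder above does, assuming
   the argument is itself a recognized exp/pow-family builtin call and
   flag_unsafe_math_optimizations is set:

     log (exp (x))     becomes  x
     log (pow (x, y))  becomes  y * log (x)
     log2 (sqrt (x))   becomes  0.5 * log2 (x)

   The rewritten multiply reuses FNDECL via build_call_expr, so the
   float and long double variants keep their own precision.  */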
/* Fold a builtin function call to hypot, hypotf, or hypotl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_hypot (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res, narg0, narg1;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
    return res;

  /* If either argument to hypot has a negate or abs, strip that off.
     E.g. hypot(-x,fabs(y)) -> hypot(x,y).  */
  narg0 = fold_strip_sign_ops (arg0);
  narg1 = fold_strip_sign_ops (arg1);
  if (narg0 || narg1)
    return build_call_expr (fndecl, 2, narg0 ? narg0 : arg0,
			    narg1 ? narg1 : arg1);

  /* If either argument is zero, hypot is fabs of the other.  */
  if (real_zerop (arg0))
    return fold_build1 (ABS_EXPR, type, arg1);
  else if (real_zerop (arg1))
    return fold_build1 (ABS_EXPR, type, arg0);

  /* hypot(x,x) -> fabs(x)*sqrt(2).  */
  if (flag_unsafe_math_optimizations
      && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
    {
      const REAL_VALUE_TYPE sqrt2_trunc
	= real_value_truncate (TYPE_MODE (type), dconstsqrt2);
      return fold_build2 (MULT_EXPR, type,
			  fold_build1 (ABS_EXPR, type, arg0),
			  build_real (type, sqrt2_trunc));
    }

  return NULL_TREE;
}
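/* A minimal sketch of the hypot simplifications above, in source-level
   notation (constants are truncated to the argument's mode):

     hypot (-x, fabs (y))  becomes  hypot (x, y)
     hypot (x, 0.0)        becomes  fabs (x)
     hypot (x, x)          becomes  fabs (x) * sqrt (2)   [unsafe math only]  */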
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand (type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2 (RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr (sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconstthird);

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr (cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr (fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2 (MULT_EXPR, type, arg, arg1);
	  return build_call_expr (expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
				    build_real (type, dconsthalf));
	  return build_call_expr (fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconstthird);
	      tree narg1 = fold_build2 (MULT_EXPR, type, arg1,
					build_real (type, dconstroot));
	      return build_call_expr (fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  tree arg01 = CALL_EXPR_ARG (arg0, 1);
	  tree narg1 = fold_build2 (MULT_EXPR, type, arg01, arg1);
	  return build_call_expr (fndecl, 2, arg00, narg1);
	}
    }

  return NULL_TREE;
}
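/* A minimal sketch of the pow simplifications above; the constant-exponent
   identities are unconditional, the rest assume flag_unsafe_math_optimizations:

     pow (x, 0.0)         becomes  1.0
     pow (x, 1.0)         becomes  x
     pow (x, -1.0)        becomes  1.0 / x
     pow (x, 0.5)         becomes  sqrt (x)          [unsafe math]
     pow (sqrt (x), y)    becomes  pow (x, y * 0.5)  [unsafe math]
     pow (pow (x, y), z)  becomes  pow (x, y * z)    [unsafe math]

   A constant base raised to an integral constant exponent is evaluated at
   compile time through real_powi when the result is exact (or always,
   with unsafe math).  */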
/* Fold a builtin function call to powi, powif, or powil with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED,
		   tree arg0, tree arg1, tree type)
{
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  if (host_integerp (arg1, 0))
    {
      HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && !TREE_OVERFLOW (arg0))
	{
	  REAL_VALUE_TYPE x;
	  x = TREE_REAL_CST (arg0);
	  real_powi (&x, TYPE_MODE (type), &x, c);
	  return build_real (type, x);
	}

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
	return omit_one_operand (type, build_real (type, dconst1),
				 arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
	return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
	return fold_build2 (RDIV_EXPR, type,
			    build_real (type, dconst1), arg0);
    }

  return NULL_TREE;
}
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert (type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
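/* A minimal sketch of the expN(logN(x)) folding above, assuming
   flag_unsafe_math_optimizations and a matching base:

     exp (log (x))      becomes  x
     exp2 (log2 (x))    becomes  x
     exp10 (log10 (x))  becomes  x

   Mismatched bases, e.g. exp (log2 (x)), are deliberately left alone.  */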
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
8340 /* Fold function call to builtin memset. Return
8341 NULL_TREE if no simplification can be made. */
8344 fold_builtin_memset (tree dest
, tree c
, tree len
, tree type
, bool ignore
)
8347 unsigned HOST_WIDE_INT length
, cval
;
8349 if (! validate_arg (dest
, POINTER_TYPE
)
8350 || ! validate_arg (c
, INTEGER_TYPE
)
8351 || ! validate_arg (len
, INTEGER_TYPE
))
8354 if (! host_integerp (len
, 1))
8357 /* If the LEN parameter is zero, return DEST. */
8358 if (integer_zerop (len
))
8359 return omit_one_operand (type
, dest
, c
);
8361 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8366 if (TREE_CODE (var
) != ADDR_EXPR
)
8369 var
= TREE_OPERAND (var
, 0);
8370 if (TREE_THIS_VOLATILE (var
))
8373 if (!INTEGRAL_TYPE_P (TREE_TYPE (var
))
8374 && !POINTER_TYPE_P (TREE_TYPE (var
)))
8377 if (! var_decl_component_p (var
))
8380 length
= tree_low_cst (len
, 1);
8381 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var
))) != length
8382 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8386 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8389 if (integer_zerop (c
))
8393 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8396 cval
= tree_low_cst (c
, 1);
8400 cval
|= (cval
<< 31) << 1;
8403 ret
= build_int_cst_type (TREE_TYPE (var
), cval
);
8404 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, ret
);
8408 return omit_one_operand (type
, dest
, ret
);
/* Fold function call to builtin bzero with arguments DEST and SIZE.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (!ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fallback to
     calling bzero instead of memset.  */

  return fold_builtin_memset (dest, integer_zero_node,
			      fold_convert (sizetype, size),
			      void_type_node, ignore);
}
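/* A minimal sketch of the bzero rewrite above, in source-level terms:

     bzero (p, n)  is folded as  memset (p, 0, (size_t) n)

   The arguments are simply handed to fold_builtin_memset, so the
   zero-length and single-store simplifications there apply as well.  */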
8434 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8435 NULL_TREE if no simplification can be made.
8436 If ENDP is 0, return DEST (like memcpy).
8437 If ENDP is 1, return DEST+LEN (like mempcpy).
8438 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8439 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8443 fold_builtin_memory_op (tree dest
, tree src
, tree len
, tree type
, bool ignore
, int endp
)
8445 tree destvar
, srcvar
, expr
;
8447 if (! validate_arg (dest
, POINTER_TYPE
)
8448 || ! validate_arg (src
, POINTER_TYPE
)
8449 || ! validate_arg (len
, INTEGER_TYPE
))
8452 /* If the LEN parameter is zero, return DEST. */
8453 if (integer_zerop (len
))
8454 return omit_one_operand (type
, dest
, src
);
8456 /* If SRC and DEST are the same (and not volatile), return
8457 DEST{,+LEN,+LEN-1}. */
8458 if (operand_equal_p (src
, dest
, 0))
8462 tree srctype
, desttype
;
8465 int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8466 int dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
8468 /* Both DEST and SRC must be pointer types.
8469 ??? This is what old code did. Is the testing for pointer types
8472 If either SRC is readonly or length is 1, we can use memcpy. */
8473 if (dest_align
&& src_align
8474 && (readonly_data_expr (src
)
8475 || (host_integerp (len
, 1)
8476 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
>=
8477 tree_low_cst (len
, 1)))))
8479 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8482 return build_call_expr (fn
, 3, dest
, src
, len
);
8487 if (!host_integerp (len
, 0))
8490 This logic lose for arguments like (type *)malloc (sizeof (type)),
8491 since we strip the casts of up to VOID return value from malloc.
8492 Perhaps we ought to inherit type from non-VOID argument here? */
8495 srctype
= TREE_TYPE (TREE_TYPE (src
));
8496 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8497 if (!srctype
|| !desttype
8498 || !TYPE_SIZE_UNIT (srctype
)
8499 || !TYPE_SIZE_UNIT (desttype
)
8500 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8501 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
8502 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
8503 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8506 if (get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
)
8507 < (int) TYPE_ALIGN (desttype
)
8508 || (get_pointer_alignment (src
, BIGGEST_ALIGNMENT
)
8509 < (int) TYPE_ALIGN (srctype
)))
8513 dest
= builtin_save_expr (dest
);
8515 srcvar
= build_fold_indirect_ref (src
);
8516 if (TREE_THIS_VOLATILE (srcvar
))
8518 if (!tree_int_cst_equal (lang_hooks
.expr_size (srcvar
), len
))
8520 /* With memcpy, it is possible to bypass aliasing rules, so without
8521 this check i. e. execute/20060930-2.c would be misoptimized, because
8522 it use conflicting alias set to hold argument for the memcpy call.
8523 This check is probably unnecesary with -fno-strict-aliasing.
8524 Similarly for destvar. See also PR29286. */
8525 if (!var_decl_component_p (srcvar
)
8526 /* Accept: memcpy (*char_var, "test", 1); that simplify
8528 || is_gimple_min_invariant (srcvar
)
8529 || readonly_data_expr (src
))
8532 destvar
= build_fold_indirect_ref (dest
);
8533 if (TREE_THIS_VOLATILE (destvar
))
8535 if (!tree_int_cst_equal (lang_hooks
.expr_size (destvar
), len
))
8537 if (!var_decl_component_p (destvar
))
8540 if (srctype
== desttype
8541 || (gimple_in_ssa_p (cfun
)
8542 && tree_ssa_useless_type_conversion_1 (desttype
, srctype
)))
8544 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar
))
8545 || POINTER_TYPE_P (TREE_TYPE (srcvar
)))
8546 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar
))
8547 || POINTER_TYPE_P (TREE_TYPE (destvar
))))
8548 expr
= fold_convert (TREE_TYPE (destvar
), srcvar
);
8550 expr
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (destvar
), srcvar
);
8551 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, expr
);
8557 if (endp
== 0 || endp
== 3)
8558 return omit_one_operand (type
, dest
, expr
);
8564 len
= fold_build2 (MINUS_EXPR
, TREE_TYPE (len
), len
,
8567 len
= fold_convert (TREE_TYPE (dest
), len
);
8568 dest
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8569 dest
= fold_convert (type
, dest
);
8571 dest
= omit_one_operand (type
, dest
, expr
);
8575 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8576 If LEN is not NULL, it represents the length of the string to be
8577 copied. Return NULL_TREE if no simplification can be made. */
8580 fold_builtin_strcpy (tree fndecl
, tree dest
, tree src
, tree len
)
8584 if (!validate_arg (dest
, POINTER_TYPE
)
8585 || !validate_arg (src
, POINTER_TYPE
))
8588 /* If SRC and DEST are the same (and not volatile), return DEST. */
8589 if (operand_equal_p (src
, dest
, 0))
8590 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8595 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8601 len
= c_strlen (src
, 1);
8602 if (! len
|| TREE_SIDE_EFFECTS (len
))
8606 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
8607 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8608 build_call_expr (fn
, 3, dest
, src
, len
));
8611 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8612 If SLEN is not NULL, it represents the length of the source string.
8613 Return NULL_TREE if no simplification can be made. */
8616 fold_builtin_strncpy (tree fndecl
, tree dest
, tree src
, tree len
, tree slen
)
8620 if (!validate_arg (dest
, POINTER_TYPE
)
8621 || !validate_arg (src
, POINTER_TYPE
)
8622 || !validate_arg (len
, INTEGER_TYPE
))
8625 /* If the LEN parameter is zero, return DEST. */
8626 if (integer_zerop (len
))
8627 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
8629 /* We can't compare slen with len as constants below if len is not a
8631 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8635 slen
= c_strlen (src
, 1);
8637 /* Now, we must be passed a constant src ptr parameter. */
8638 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8641 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
8643 /* We do not support simplification of this case, though we do
8644 support it when expanding trees into RTL. */
8645 /* FIXME: generate a call to __builtin_memset. */
8646 if (tree_int_cst_lt (slen
, len
))
8649 /* OK transform into builtin memcpy. */
8650 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8653 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8654 build_call_expr (fn
, 3, dest
, src
, len
));
8657 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8658 Return NULL_TREE if no simplification can be made. */
8661 fold_builtin_memcmp (tree arg1
, tree arg2
, tree len
)
8663 const char *p1
, *p2
;
8665 if (!validate_arg (arg1
, POINTER_TYPE
)
8666 || !validate_arg (arg2
, POINTER_TYPE
)
8667 || !validate_arg (len
, INTEGER_TYPE
))
8670 /* If the LEN parameter is zero, return zero. */
8671 if (integer_zerop (len
))
8672 return omit_two_operands (integer_type_node
, integer_zero_node
,
8675 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8676 if (operand_equal_p (arg1
, arg2
, 0))
8677 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
8679 p1
= c_getstr (arg1
);
8680 p2
= c_getstr (arg2
);
8682 /* If all arguments are constant, and the value of len is not greater
8683 than the lengths of arg1 and arg2, evaluate at compile-time. */
8684 if (host_integerp (len
, 1) && p1
&& p2
8685 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8686 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8688 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
8691 return integer_one_node
;
8693 return integer_minus_one_node
;
8695 return integer_zero_node
;
8698 /* If len parameter is one, return an expression corresponding to
8699 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8700 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8702 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8703 tree cst_uchar_ptr_node
8704 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8706 tree ind1
= fold_convert (integer_type_node
,
8707 build1 (INDIRECT_REF
, cst_uchar_node
,
8708 fold_convert (cst_uchar_ptr_node
,
8710 tree ind2
= fold_convert (integer_type_node
,
8711 build1 (INDIRECT_REF
, cst_uchar_node
,
8712 fold_convert (cst_uchar_ptr_node
,
8714 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
8720 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8721 Return NULL_TREE if no simplification can be made. */
8724 fold_builtin_strcmp (tree arg1
, tree arg2
)
8726 const char *p1
, *p2
;
8728 if (!validate_arg (arg1
, POINTER_TYPE
)
8729 || !validate_arg (arg2
, POINTER_TYPE
))
8732 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8733 if (operand_equal_p (arg1
, arg2
, 0))
8734 return integer_zero_node
;
8736 p1
= c_getstr (arg1
);
8737 p2
= c_getstr (arg2
);
8741 const int i
= strcmp (p1
, p2
);
8743 return integer_minus_one_node
;
8745 return integer_one_node
;
8747 return integer_zero_node
;
8750 /* If the second arg is "", return *(const unsigned char*)arg1. */
8751 if (p2
&& *p2
== '\0')
8753 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8754 tree cst_uchar_ptr_node
8755 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8757 return fold_convert (integer_type_node
,
8758 build1 (INDIRECT_REF
, cst_uchar_node
,
8759 fold_convert (cst_uchar_ptr_node
,
8763 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8764 if (p1
&& *p1
== '\0')
8766 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8767 tree cst_uchar_ptr_node
8768 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8770 tree temp
= fold_convert (integer_type_node
,
8771 build1 (INDIRECT_REF
, cst_uchar_node
,
8772 fold_convert (cst_uchar_ptr_node
,
8774 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
8780 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8781 Return NULL_TREE if no simplification can be made. */
8784 fold_builtin_strncmp (tree arg1
, tree arg2
, tree len
)
8786 const char *p1
, *p2
;
8788 if (!validate_arg (arg1
, POINTER_TYPE
)
8789 || !validate_arg (arg2
, POINTER_TYPE
)
8790 || !validate_arg (len
, INTEGER_TYPE
))
8793 /* If the LEN parameter is zero, return zero. */
8794 if (integer_zerop (len
))
8795 return omit_two_operands (integer_type_node
, integer_zero_node
,
8798 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8799 if (operand_equal_p (arg1
, arg2
, 0))
8800 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
8802 p1
= c_getstr (arg1
);
8803 p2
= c_getstr (arg2
);
8805 if (host_integerp (len
, 1) && p1
&& p2
)
8807 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
8809 return integer_one_node
;
8811 return integer_minus_one_node
;
8813 return integer_zero_node
;
8816 /* If the second arg is "", and the length is greater than zero,
8817 return *(const unsigned char*)arg1. */
8818 if (p2
&& *p2
== '\0'
8819 && TREE_CODE (len
) == INTEGER_CST
8820 && tree_int_cst_sgn (len
) == 1)
8822 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8823 tree cst_uchar_ptr_node
8824 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8826 return fold_convert (integer_type_node
,
8827 build1 (INDIRECT_REF
, cst_uchar_node
,
8828 fold_convert (cst_uchar_ptr_node
,
8832 /* If the first arg is "", and the length is greater than zero,
8833 return -*(const unsigned char*)arg2. */
8834 if (p1
&& *p1
== '\0'
8835 && TREE_CODE (len
) == INTEGER_CST
8836 && tree_int_cst_sgn (len
) == 1)
8838 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8839 tree cst_uchar_ptr_node
8840 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8842 tree temp
= fold_convert (integer_type_node
,
8843 build1 (INDIRECT_REF
, cst_uchar_node
,
8844 fold_convert (cst_uchar_ptr_node
,
8846 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
8849 /* If len parameter is one, return an expression corresponding to
8850 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8851 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8853 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8854 tree cst_uchar_ptr_node
8855 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8857 tree ind1
= fold_convert (integer_type_node
,
8858 build1 (INDIRECT_REF
, cst_uchar_node
,
8859 fold_convert (cst_uchar_ptr_node
,
8861 tree ind2
= fold_convert (integer_type_node
,
8862 build1 (INDIRECT_REF
, cst_uchar_node
,
8863 fold_convert (cst_uchar_ptr_node
,
8865 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (tree arg, tree type)
{
  tree temp;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
      return fold_convert (type, temp);
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand (type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_build2 (LT_EXPR, type, arg,
			build_real (TREE_TYPE (arg), dconst0));

  return NULL_TREE;
}
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert (type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand (type,
			     fold_build1 (ABS_EXPR, type, arg1),
			     arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr (fndecl, 2, tem, arg2);

  return NULL_TREE;
}
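/* A minimal sketch of the copysign simplifications above:

     copysign (x, x)         becomes  x
     copysign (-3.0, 2.0)    is evaluated to 3.0 at compile time
     copysign (x, fabs (y))  becomes  fabs (x)   [second argument non-negative]

   Sign-changing operations on the first argument are stripped with
   fold_strip_sign_ops before the call is rebuilt.  */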
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
  arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
		build_int_cst (NULL_TREE,
			       ~ (unsigned HOST_WIDE_INT) 0x7f));
  return fold_build2 (EQ_EXPR, integer_type_node,
		      arg, integer_zero_node);
}

/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
		      build_int_cst (NULL_TREE, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert (unsigned_type_node, arg);
      arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
		    build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2 (LE_EXPR, integer_type_node, arg,
			  build_int_cst (unsigned_type_node, 9));
    }
}
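/* A minimal sketch of the character-class rewrites above; '0' here is the
   target character set's digit zero as returned by the langhook:

     isascii (c)  becomes  ((c & ~0x7f) == 0)
     toascii (c)  becomes  (c & 0x7f)
     isdigit (c)  becomes  ((unsigned) c - '0' <= 9)

   so no library call remains once the folding fires.  */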
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert (type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1 (ABS_EXPR, type, arg);
}

/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert (type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1 (ABS_EXPR, type, arg);
}
9037 /* Fold a call to builtin fmin or fmax. */
9040 fold_builtin_fmin_fmax (tree arg0
, tree arg1
, tree type
, bool max
)
9042 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
9044 /* Calculate the result when the argument is a constant. */
9045 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
9050 /* If either argument is NaN, return the other one. Avoid the
9051 transformation if we get (and honor) a signalling NaN. Using
9052 omit_one_operand() ensures we create a non-lvalue. */
9053 if (TREE_CODE (arg0
) == REAL_CST
9054 && real_isnan (&TREE_REAL_CST (arg0
))
9055 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
9056 || ! TREE_REAL_CST (arg0
).signalling
))
9057 return omit_one_operand (type
, arg1
, arg0
);
9058 if (TREE_CODE (arg1
) == REAL_CST
9059 && real_isnan (&TREE_REAL_CST (arg1
))
9060 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
9061 || ! TREE_REAL_CST (arg1
).signalling
))
9062 return omit_one_operand (type
, arg0
, arg1
);
9064 /* Transform fmin/fmax(x,x) -> x. */
9065 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
9066 return omit_one_operand (type
, arg0
, arg1
);
9068 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9069 functions to return the numeric arg if the other one is NaN.
9070 These tree codes don't honor that, so only transform if
9071 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9072 handled, so we don't have to worry about it either. */
9073 if (flag_finite_math_only
)
9074 return fold_build2 ((max
? MAX_EXPR
: MIN_EXPR
), type
,
9075 fold_convert (type
, arg0
),
9076 fold_convert (type
, arg1
));
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE))
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
	  return build_call_expr (atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
9102 /* Fold a call to builtin logb/ilogb. */
9105 fold_builtin_logb (tree arg
, tree rettype
)
9107 if (! validate_arg (arg
, REAL_TYPE
))
9112 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9114 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9120 /* If arg is Inf or NaN and we're logb, return it. */
9121 if (TREE_CODE (rettype
) == REAL_TYPE
)
9122 return fold_convert (rettype
, arg
);
9123 /* Fall through... */
9125 /* Zero may set errno and/or raise an exception for logb, also
9126 for ilogb we don't know FP_ILOGB0. */
9129 /* For normal numbers, proceed iff radix == 2. In GCC,
9130 normalized significands are in the range [0.5, 1.0). We
9131 want the exponent as if they were [1.0, 2.0) so get the
9132 exponent and subtract 1. */
9133 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9134 return fold_convert (rettype
, build_int_cst (NULL_TREE
,
9135 REAL_EXP (value
)-1));
9143 /* Fold a call to builtin significand, if radix == 2. */
9146 fold_builtin_significand (tree arg
, tree rettype
)
9148 if (! validate_arg (arg
, REAL_TYPE
))
9153 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9155 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9162 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9163 return fold_convert (rettype
, arg
);
9165 /* For normal numbers, proceed iff radix == 2. */
9166 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9168 REAL_VALUE_TYPE result
= *value
;
9169 /* In GCC, normalized significands are in the range [0.5,
9170 1.0). We want them to be [1.0, 2.0) so set the
9172 SET_REAL_EXP (&result
, 1);
9173 return build_real (rettype
, result
);
9182 /* Fold a call to builtin frexp, we can assume the base is 2. */
9185 fold_builtin_frexp (tree arg0
, tree arg1
, tree rettype
)
9187 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9192 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9195 arg1
= build_fold_indirect_ref (arg1
);
9197 /* Proceed if a valid pointer type was passed in. */
9198 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9200 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9206 /* For +-0, return (*exp = 0, +-0). */
9207 exp
= integer_zero_node
;
9212 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9213 return omit_one_operand (rettype
, arg0
, arg1
);
9216 /* Since the frexp function always expects base 2, and in
9217 GCC normalized significands are already in the range
9218 [0.5, 1.0), we have exactly what frexp wants. */
9219 REAL_VALUE_TYPE frac_rvt
= *value
;
9220 SET_REAL_EXP (&frac_rvt
, 0);
9221 frac
= build_real (rettype
, frac_rvt
);
9222 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9229 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9230 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
, exp
);
9231 TREE_SIDE_EFFECTS (arg1
) = 1;
9232 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
, frac
);
9238 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9239 then we can assume the base is two. If it's false, then we have to
9240 check the mode of the TYPE parameter in certain cases. */
9243 fold_builtin_load_exponent (tree arg0
, tree arg1
, tree type
, bool ldexp
)
9245 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9250 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9251 if (real_zerop (arg0
) || integer_zerop (arg1
)
9252 || (TREE_CODE (arg0
) == REAL_CST
9253 && (real_isnan (&TREE_REAL_CST (arg0
))
9254 || real_isinf (&TREE_REAL_CST (arg0
)))))
9255 return omit_one_operand (type
, arg0
, arg1
);
9257 /* If both arguments are constant, then try to evaluate it. */
9258 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9259 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9260 && host_integerp (arg1
, 0))
9262 /* Bound the maximum adjustment to twice the range of the
9263 mode's valid exponents. Use abs to ensure the range is
9264 positive as a sanity check. */
9265 const long max_exp_adj
= 2 *
9266 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9267 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9269 /* Get the user-requested adjustment. */
9270 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9272 /* The requested adjustment must be inside this range. This
9273 is a preliminary cap to avoid things like overflow, we
9274 may still fail to compute the result for other reasons. */
9275 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9277 REAL_VALUE_TYPE initial_result
;
9279 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9281 /* Ensure we didn't overflow. */
9282 if (! real_isinf (&initial_result
))
9284 const REAL_VALUE_TYPE trunc_result
9285 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9287 /* Only proceed if the target mode can hold the
9289 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9290 return build_real (type
, trunc_result
);
9299 /* Fold a call to builtin modf. */
9302 fold_builtin_modf (tree arg0
, tree arg1
, tree rettype
)
9304 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9309 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9312 arg1
= build_fold_indirect_ref (arg1
);
9314 /* Proceed if a valid pointer type was passed in. */
9315 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9317 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9318 REAL_VALUE_TYPE trunc
, frac
;
9324 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9325 trunc
= frac
= *value
;
9328 /* For +-Inf, return (*arg1 = arg0, +-0). */
9330 frac
.sign
= value
->sign
;
9334 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9335 real_trunc (&trunc
, VOIDmode
, value
);
9336 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9337 /* If the original number was negative and already
9338 integral, then the fractional part is -0.0. */
9339 if (value
->sign
&& frac
.cl
== rvc_zero
)
9340 frac
.sign
= value
->sign
;
9344 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9345 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
,
9346 build_real (rettype
, trunc
));
9347 TREE_SIDE_EFFECTS (arg1
) = 1;
9348 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
,
9349 build_real (rettype
, frac
));
9355 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9356 ARG is the argument for the call. */
9359 fold_builtin_classify (tree fndecl
, tree arg
, int builtin_index
)
9361 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9364 if (!validate_arg (arg
, REAL_TYPE
))
9366 error ("non-floating-point argument to function %qs",
9367 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9368 return error_mark_node
;
9371 switch (builtin_index
)
9373 case BUILT_IN_ISINF
:
9374 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9375 return omit_one_operand (type
, integer_zero_node
, arg
);
9377 if (TREE_CODE (arg
) == REAL_CST
)
9379 r
= TREE_REAL_CST (arg
);
9380 if (real_isinf (&r
))
9381 return real_compare (GT_EXPR
, &r
, &dconst0
)
9382 ? integer_one_node
: integer_minus_one_node
;
9384 return integer_zero_node
;
9389 case BUILT_IN_FINITE
:
9390 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9391 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9392 return omit_one_operand (type
, integer_one_node
, arg
);
9394 if (TREE_CODE (arg
) == REAL_CST
)
9396 r
= TREE_REAL_CST (arg
);
9397 return real_isinf (&r
) || real_isnan (&r
)
9398 ? integer_zero_node
: integer_one_node
;
9403 case BUILT_IN_ISNAN
:
9404 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9405 return omit_one_operand (type
, integer_zero_node
, arg
);
9407 if (TREE_CODE (arg
) == REAL_CST
)
9409 r
= TREE_REAL_CST (arg
);
9410 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9413 arg
= builtin_save_expr (arg
);
9414 return fold_build2 (UNORDERED_EXPR
, type
, arg
, arg
);
9421 /* Fold a call to an unordered comparison function such as
9422 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9423 being called and ARG0 and ARG1 are the arguments for the call.
9424 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9425 the opposite of the desired result. UNORDERED_CODE is used
9426 for modes that can hold NaNs and ORDERED_CODE is used for
9430 fold_builtin_unordered_cmp (tree fndecl
, tree arg0
, tree arg1
,
9431 enum tree_code unordered_code
,
9432 enum tree_code ordered_code
)
9434 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9435 enum tree_code code
;
9437 enum tree_code code0
, code1
;
9438 tree cmp_type
= NULL_TREE
;
9440 type0
= TREE_TYPE (arg0
);
9441 type1
= TREE_TYPE (arg1
);
9443 code0
= TREE_CODE (type0
);
9444 code1
= TREE_CODE (type1
);
9446 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9447 /* Choose the wider of two real types. */
9448 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9450 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9452 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9456 error ("non-floating-point argument to function %qs",
9457 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9458 return error_mark_node
;
9461 arg0
= fold_convert (cmp_type
, arg0
);
9462 arg1
= fold_convert (cmp_type
, arg1
);
9464 if (unordered_code
== UNORDERED_EXPR
)
9466 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9467 return omit_two_operands (type
, integer_zero_node
, arg0
, arg1
);
9468 return fold_build2 (UNORDERED_EXPR
, type
, arg0
, arg1
);
9471 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
9473 return fold_build1 (TRUTH_NOT_EXPR
, type
,
9474 fold_build2 (code
, type
, arg0
, arg1
));
9477 /* Fold a call to built-in function FNDECL with 0 arguments.
9478 IGNORE is true if the result of the function call is ignored. This
9479 function returns NULL_TREE if no simplification was possible. */
9482 fold_builtin_0 (tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
9484 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9485 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9488 CASE_FLT_FN (BUILT_IN_INF
):
9489 case BUILT_IN_INFD32
:
9490 case BUILT_IN_INFD64
:
9491 case BUILT_IN_INFD128
:
9492 return fold_builtin_inf (type
, true);
9494 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9495 return fold_builtin_inf (type
, false);
9497 case BUILT_IN_CLASSIFY_TYPE
:
9498 return fold_builtin_classify_type (NULL_TREE
);
9506 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9507 IGNORE is true if the result of the function call is ignored. This
9508 function returns NULL_TREE if no simplification was possible. */
9511 fold_builtin_1 (tree fndecl
, tree arg0
, bool ignore
)
9513 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9514 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9518 case BUILT_IN_CONSTANT_P
:
9520 tree val
= fold_builtin_constant_p (arg0
);
9522 /* Gimplification will pull the CALL_EXPR for the builtin out of
9523 an if condition. When not optimizing, we'll not CSE it back.
9524 To avoid link error types of regressions, return false now. */
9525 if (!val
&& !optimize
)
9526 val
= integer_zero_node
;
9531 case BUILT_IN_CLASSIFY_TYPE
:
9532 return fold_builtin_classify_type (arg0
);
9534 case BUILT_IN_STRLEN
:
9535 return fold_builtin_strlen (arg0
);
9537 CASE_FLT_FN (BUILT_IN_FABS
):
9538 return fold_builtin_fabs (arg0
, type
);
9542 case BUILT_IN_LLABS
:
9543 case BUILT_IN_IMAXABS
:
9544 return fold_builtin_abs (arg0
, type
);
9546 CASE_FLT_FN (BUILT_IN_CONJ
):
9547 if (validate_arg (arg0
, COMPLEX_TYPE
))
9548 return fold_build1 (CONJ_EXPR
, type
, arg0
);
9551 CASE_FLT_FN (BUILT_IN_CREAL
):
9552 if (validate_arg (arg0
, COMPLEX_TYPE
))
9553 return non_lvalue (fold_build1 (REALPART_EXPR
, type
, arg0
));;
9556 CASE_FLT_FN (BUILT_IN_CIMAG
):
9557 if (validate_arg (arg0
, COMPLEX_TYPE
))
9558 return non_lvalue (fold_build1 (IMAGPART_EXPR
, type
, arg0
));
9561 CASE_FLT_FN (BUILT_IN_CCOS
):
9562 CASE_FLT_FN (BUILT_IN_CCOSH
):
9563 /* These functions are "even", i.e. f(x) == f(-x). */
9564 if (validate_arg (arg0
, COMPLEX_TYPE
))
9566 tree narg
= fold_strip_sign_ops (arg0
);
9568 return build_call_expr (fndecl
, 1, narg
);
9572 CASE_FLT_FN (BUILT_IN_CABS
):
9573 return fold_builtin_cabs (arg0
, type
, fndecl
);
9575 CASE_FLT_FN (BUILT_IN_CARG
):
9576 return fold_builtin_carg (arg0
, type
);
9578 CASE_FLT_FN (BUILT_IN_SQRT
):
9579 return fold_builtin_sqrt (arg0
, type
);
9581 CASE_FLT_FN (BUILT_IN_CBRT
):
9582 return fold_builtin_cbrt (arg0
, type
);
9584 CASE_FLT_FN (BUILT_IN_ASIN
):
9585 if (validate_arg (arg0
, REAL_TYPE
))
9586 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9587 &dconstm1
, &dconst1
, true);
9590 CASE_FLT_FN (BUILT_IN_ACOS
):
9591 if (validate_arg (arg0
, REAL_TYPE
))
9592 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9593 &dconstm1
, &dconst1
, true);
9596 CASE_FLT_FN (BUILT_IN_ATAN
):
9597 if (validate_arg (arg0
, REAL_TYPE
))
9598 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9601 CASE_FLT_FN (BUILT_IN_ASINH
):
9602 if (validate_arg (arg0
, REAL_TYPE
))
9603 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9606 CASE_FLT_FN (BUILT_IN_ACOSH
):
9607 if (validate_arg (arg0
, REAL_TYPE
))
9608 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9609 &dconst1
, NULL
, true);
9612 CASE_FLT_FN (BUILT_IN_ATANH
):
9613 if (validate_arg (arg0
, REAL_TYPE
))
9614 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9615 &dconstm1
, &dconst1
, false);
9618 CASE_FLT_FN (BUILT_IN_SIN
):
9619 if (validate_arg (arg0
, REAL_TYPE
))
9620 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9623 CASE_FLT_FN (BUILT_IN_COS
):
9624 return fold_builtin_cos (arg0
, type
, fndecl
);
9627 CASE_FLT_FN (BUILT_IN_TAN
):
9628 return fold_builtin_tan (arg0
, type
);
9630 CASE_FLT_FN (BUILT_IN_CEXP
):
9631 return fold_builtin_cexp (arg0
, type
);
9633 CASE_FLT_FN (BUILT_IN_CEXPI
):
9634 if (validate_arg (arg0
, REAL_TYPE
))
9635 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
9638 CASE_FLT_FN (BUILT_IN_SINH
):
9639 if (validate_arg (arg0
, REAL_TYPE
))
9640 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
9643 CASE_FLT_FN (BUILT_IN_COSH
):
9644 return fold_builtin_cosh (arg0
, type
, fndecl
);
9646 CASE_FLT_FN (BUILT_IN_TANH
):
9647 if (validate_arg (arg0
, REAL_TYPE
))
9648 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
9651 CASE_FLT_FN (BUILT_IN_ERF
):
9652 if (validate_arg (arg0
, REAL_TYPE
))
9653 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
9656 CASE_FLT_FN (BUILT_IN_ERFC
):
9657 if (validate_arg (arg0
, REAL_TYPE
))
9658 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
9661 CASE_FLT_FN (BUILT_IN_TGAMMA
):
9662 if (validate_arg (arg0
, REAL_TYPE
))
9663 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
9666 CASE_FLT_FN (BUILT_IN_EXP
):
9667 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp
);
9669 CASE_FLT_FN (BUILT_IN_EXP2
):
9670 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp2
);
9672 CASE_FLT_FN (BUILT_IN_EXP10
):
9673 CASE_FLT_FN (BUILT_IN_POW10
):
9674 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp10
);
9676 CASE_FLT_FN (BUILT_IN_EXPM1
):
9677 if (validate_arg (arg0
, REAL_TYPE
))
9678 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
9681 CASE_FLT_FN (BUILT_IN_LOG
):
9682 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log
);
9684 CASE_FLT_FN (BUILT_IN_LOG2
):
9685 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log2
);
9687 CASE_FLT_FN (BUILT_IN_LOG10
):
9688 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log10
);
9690 CASE_FLT_FN (BUILT_IN_LOG1P
):
9691 if (validate_arg (arg0
, REAL_TYPE
))
9692 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
9693 &dconstm1
, NULL
, false);
9696 CASE_FLT_FN (BUILT_IN_NAN
):
9697 case BUILT_IN_NAND32
:
9698 case BUILT_IN_NAND64
:
9699 case BUILT_IN_NAND128
:
9700 return fold_builtin_nan (arg0
, type
, true);
9702 CASE_FLT_FN (BUILT_IN_NANS
):
9703 return fold_builtin_nan (arg0
, type
, false);
9705 CASE_FLT_FN (BUILT_IN_FLOOR
):
9706 return fold_builtin_floor (fndecl
, arg0
);
9708 CASE_FLT_FN (BUILT_IN_CEIL
):
9709 return fold_builtin_ceil (fndecl
, arg0
);
9711 CASE_FLT_FN (BUILT_IN_TRUNC
):
9712 return fold_builtin_trunc (fndecl
, arg0
);
9714 CASE_FLT_FN (BUILT_IN_ROUND
):
9715 return fold_builtin_round (fndecl
, arg0
);
9717 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
9718 CASE_FLT_FN (BUILT_IN_RINT
):
9719 return fold_trunc_transparent_mathfn (fndecl
, arg0
);
9721 CASE_FLT_FN (BUILT_IN_LCEIL
):
9722 CASE_FLT_FN (BUILT_IN_LLCEIL
):
9723 CASE_FLT_FN (BUILT_IN_LFLOOR
):
9724 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
9725 CASE_FLT_FN (BUILT_IN_LROUND
):
9726 CASE_FLT_FN (BUILT_IN_LLROUND
):
9727 return fold_builtin_int_roundingfn (fndecl
, arg0
);
9729 CASE_FLT_FN (BUILT_IN_LRINT
):
9730 CASE_FLT_FN (BUILT_IN_LLRINT
):
9731 return fold_fixed_mathfn (fndecl
, arg0
);
9733 case BUILT_IN_BSWAP32
:
9734 case BUILT_IN_BSWAP64
:
9735 return fold_builtin_bswap (fndecl
, arg0
);
9737 CASE_INT_FN (BUILT_IN_FFS
):
9738 CASE_INT_FN (BUILT_IN_CLZ
):
9739 CASE_INT_FN (BUILT_IN_CTZ
):
9740 CASE_INT_FN (BUILT_IN_POPCOUNT
):
9741 CASE_INT_FN (BUILT_IN_PARITY
):
9742 return fold_builtin_bitop (fndecl
, arg0
);
9744 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
9745 return fold_builtin_signbit (arg0
, type
);
9747 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
9748 return fold_builtin_significand (arg0
, type
);
9750 CASE_FLT_FN (BUILT_IN_ILOGB
):
9751 CASE_FLT_FN (BUILT_IN_LOGB
):
9752 return fold_builtin_logb (arg0
, type
);
9754 case BUILT_IN_ISASCII
:
9755 return fold_builtin_isascii (arg0
);
9757 case BUILT_IN_TOASCII
:
9758 return fold_builtin_toascii (arg0
);
9760 case BUILT_IN_ISDIGIT
:
9761 return fold_builtin_isdigit (arg0
);
9763 CASE_FLT_FN (BUILT_IN_FINITE
):
9764 case BUILT_IN_FINITED32
:
9765 case BUILT_IN_FINITED64
:
9766 case BUILT_IN_FINITED128
:
9767 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_FINITE
);
9769 CASE_FLT_FN (BUILT_IN_ISINF
):
9770 case BUILT_IN_ISINFD32
:
9771 case BUILT_IN_ISINFD64
:
9772 case BUILT_IN_ISINFD128
:
9773 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF
);
9775 CASE_FLT_FN (BUILT_IN_ISNAN
):
9776 case BUILT_IN_ISNAND32
:
9777 case BUILT_IN_ISNAND64
:
9778 case BUILT_IN_ISNAND128
:
9779 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISNAN
);
9781 case BUILT_IN_PRINTF
:
9782 case BUILT_IN_PRINTF_UNLOCKED
:
9783 case BUILT_IN_VPRINTF
:
9784 return fold_builtin_printf (fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
9794 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9795 IGNORE is true if the result of the function call is ignored. This
9796 function returns NULL_TREE if no simplification was possible. */
9799 fold_builtin_2 (tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
9801 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9802 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9807 CASE_FLT_FN (BUILT_IN_ATAN2
):
9808 if (validate_arg (arg0
, REAL_TYPE
)
9809 && validate_arg(arg1
, REAL_TYPE
))
9810 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
9813 CASE_FLT_FN (BUILT_IN_FDIM
):
9814 if (validate_arg (arg0
, REAL_TYPE
)
9815 && validate_arg(arg1
, REAL_TYPE
))
9816 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
9819 CASE_FLT_FN (BUILT_IN_HYPOT
):
9820 return fold_builtin_hypot (fndecl
, arg0
, arg1
, type
);
9822 CASE_FLT_FN (BUILT_IN_LDEXP
):
9823 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/true);
9824 CASE_FLT_FN (BUILT_IN_SCALBN
):
9825 CASE_FLT_FN (BUILT_IN_SCALBLN
):
9826 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/false);
9828 CASE_FLT_FN (BUILT_IN_FREXP
):
9829 return fold_builtin_frexp (arg0
, arg1
, type
);
9831 CASE_FLT_FN (BUILT_IN_MODF
):
9832 return fold_builtin_modf (arg0
, arg1
, type
);
9834 case BUILT_IN_BZERO
:
9835 return fold_builtin_bzero (arg0
, arg1
, ignore
);
9837 case BUILT_IN_FPUTS
:
9838 return fold_builtin_fputs (arg0
, arg1
, ignore
, false, NULL_TREE
);
9840 case BUILT_IN_FPUTS_UNLOCKED
:
9841 return fold_builtin_fputs (arg0
, arg1
, ignore
, true, NULL_TREE
);
9843 case BUILT_IN_STRSTR
:
9844 return fold_builtin_strstr (arg0
, arg1
, type
);
9846 case BUILT_IN_STRCAT
:
9847 return fold_builtin_strcat (arg0
, arg1
);
9849 case BUILT_IN_STRSPN
:
9850 return fold_builtin_strspn (arg0
, arg1
);
9852 case BUILT_IN_STRCSPN
:
9853 return fold_builtin_strcspn (arg0
, arg1
);
9855 case BUILT_IN_STRCHR
:
9856 case BUILT_IN_INDEX
:
9857 return fold_builtin_strchr (arg0
, arg1
, type
);
9859 case BUILT_IN_STRRCHR
:
9860 case BUILT_IN_RINDEX
:
9861 return fold_builtin_strrchr (arg0
, arg1
, type
);
9863 case BUILT_IN_STRCPY
:
9864 return fold_builtin_strcpy (fndecl
, arg0
, arg1
, NULL_TREE
);
9866 case BUILT_IN_STRCMP
:
9867 return fold_builtin_strcmp (arg0
, arg1
);
9869 case BUILT_IN_STRPBRK
:
9870 return fold_builtin_strpbrk (arg0
, arg1
, type
);
9872 case BUILT_IN_EXPECT
:
9873 return fold_builtin_expect (arg0
);
9875 CASE_FLT_FN (BUILT_IN_POW
):
9876 return fold_builtin_pow (fndecl
, arg0
, arg1
, type
);
9878 CASE_FLT_FN (BUILT_IN_POWI
):
9879 return fold_builtin_powi (fndecl
, arg0
, arg1
, type
);
9881 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
9882 return fold_builtin_copysign (fndecl
, arg0
, arg1
, type
);
9884 CASE_FLT_FN (BUILT_IN_FMIN
):
9885 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/false);
9887 CASE_FLT_FN (BUILT_IN_FMAX
):
9888 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/true);
9890 case BUILT_IN_ISGREATER
:
9891 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
9892 case BUILT_IN_ISGREATEREQUAL
:
9893 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
9894 case BUILT_IN_ISLESS
:
9895 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
9896 case BUILT_IN_ISLESSEQUAL
:
9897 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
9898 case BUILT_IN_ISLESSGREATER
:
9899 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
9900 case BUILT_IN_ISUNORDERED
:
9901 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNORDERED_EXPR
,
9904 /* We do the folding for va_start in the expander. */
9905 case BUILT_IN_VA_START
:
9908 case BUILT_IN_SPRINTF
:
9909 return fold_builtin_sprintf (arg0
, arg1
, NULL_TREE
, ignore
);
9911 case BUILT_IN_OBJECT_SIZE
:
9912 return fold_builtin_object_size (arg0
, arg1
);
9914 case BUILT_IN_PRINTF
:
9915 case BUILT_IN_PRINTF_UNLOCKED
:
9916 case BUILT_IN_VPRINTF
:
9917 return fold_builtin_printf (fndecl
, arg0
, arg1
, ignore
, fcode
);
9919 case BUILT_IN_PRINTF_CHK
:
9920 case BUILT_IN_VPRINTF_CHK
:
9921 if (!validate_arg (arg0
, INTEGER_TYPE
)
9922 || TREE_SIDE_EFFECTS (arg0
))
9925 return fold_builtin_printf (fndecl
, arg1
, NULL_TREE
, ignore
, fcode
);
9928 case BUILT_IN_FPRINTF
:
9929 case BUILT_IN_FPRINTF_UNLOCKED
:
9930 case BUILT_IN_VFPRINTF
:
9931 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, NULL_TREE
,
9940 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9941 and ARG2. IGNORE is true if the result of the function call is ignored.
9942 This function returns NULL_TREE if no simplification was possible. */
9945 fold_builtin_3 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, bool ignore
)
9947 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9948 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9952 CASE_FLT_FN (BUILT_IN_SINCOS
):
9953 return fold_builtin_sincos (arg0
, arg1
, arg2
);
9955 CASE_FLT_FN (BUILT_IN_FMA
):
9956 if (validate_arg (arg0
, REAL_TYPE
)
9957 && validate_arg(arg1
, REAL_TYPE
)
9958 && validate_arg(arg2
, REAL_TYPE
))
9959 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9962 case BUILT_IN_MEMSET
:
9963 return fold_builtin_memset (arg0
, arg1
, arg2
, type
, ignore
);
9965 case BUILT_IN_BCOPY
:
9966 return fold_builtin_memory_op (arg1
, arg0
, arg2
, void_type_node
, true, /*endp=*/3);
9968 case BUILT_IN_MEMCPY
:
9969 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/0);
9971 case BUILT_IN_MEMPCPY
:
9972 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/1);
9974 case BUILT_IN_MEMMOVE
:
9975 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/3);
9977 case BUILT_IN_STRNCAT
:
9978 return fold_builtin_strncat (arg0
, arg1
, arg2
);
9980 case BUILT_IN_STRNCPY
:
9981 return fold_builtin_strncpy (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
9983 case BUILT_IN_STRNCMP
:
9984 return fold_builtin_strncmp (arg0
, arg1
, arg2
);
9987 case BUILT_IN_MEMCMP
:
9988 return fold_builtin_memcmp (arg0
, arg1
, arg2
);;
9990 case BUILT_IN_SPRINTF
:
9991 return fold_builtin_sprintf (arg0
, arg1
, arg2
, ignore
);
9993 case BUILT_IN_STRCPY_CHK
:
9994 case BUILT_IN_STPCPY_CHK
:
9995 return fold_builtin_stxcpy_chk (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
9998 case BUILT_IN_STRCAT_CHK
:
9999 return fold_builtin_strcat_chk (fndecl
, arg0
, arg1
, arg2
);
10001 case BUILT_IN_PRINTF_CHK
:
10002 case BUILT_IN_VPRINTF_CHK
:
10003 if (!validate_arg (arg0
, INTEGER_TYPE
)
10004 || TREE_SIDE_EFFECTS (arg0
))
10007 return fold_builtin_printf (fndecl
, arg1
, arg2
, ignore
, fcode
);
10010 case BUILT_IN_FPRINTF
:
10011 case BUILT_IN_FPRINTF_UNLOCKED
:
10012 case BUILT_IN_VFPRINTF
:
10013 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, arg2
, ignore
, fcode
);
10015 case BUILT_IN_FPRINTF_CHK
:
10016 case BUILT_IN_VFPRINTF_CHK
:
10017 if (!validate_arg (arg1
, INTEGER_TYPE
)
10018 || TREE_SIDE_EFFECTS (arg1
))
10021 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, NULL_TREE
,
10030 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
10031 ARG2, and ARG3. IGNORE is true if the result of the function call is
10032 ignored. This function returns NULL_TREE if no simplification was
10036 fold_builtin_4 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, tree arg3
,
10039 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10043 case BUILT_IN_MEMCPY_CHK
:
10044 case BUILT_IN_MEMPCPY_CHK
:
10045 case BUILT_IN_MEMMOVE_CHK
:
10046 case BUILT_IN_MEMSET_CHK
:
10047 return fold_builtin_memory_chk (fndecl
, arg0
, arg1
, arg2
, arg3
,
10049 DECL_FUNCTION_CODE (fndecl
));
10051 case BUILT_IN_STRNCPY_CHK
:
10052 return fold_builtin_strncpy_chk (arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
10054 case BUILT_IN_STRNCAT_CHK
:
10055 return fold_builtin_strncat_chk (fndecl
, arg0
, arg1
, arg2
, arg3
);
10057 case BUILT_IN_FPRINTF_CHK
:
10058 case BUILT_IN_VFPRINTF_CHK
:
10059 if (!validate_arg (arg1
, INTEGER_TYPE
)
10060 || TREE_SIDE_EFFECTS (arg1
))
10063 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, arg3
,
10073 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
10074 arguments, where NARGS <= 4. IGNORE is true if the result of the
10075 function call is ignored. This function returns NULL_TREE if no
10076 simplification was possible. Note that this only folds builtins with
10077 fixed argument patterns. Foldings that do varargs-to-varargs
10078 transformations, or that match calls with more than 4 arguments,
10079 need to be handled with fold_builtin_varargs instead. */
10081 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10084 fold_builtin_n (tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10086 tree ret
= NULL_TREE
;
10090 ret
= fold_builtin_0 (fndecl
, ignore
);
10093 ret
= fold_builtin_1 (fndecl
, args
[0], ignore
);
10096 ret
= fold_builtin_2 (fndecl
, args
[0], args
[1], ignore
);
10099 ret
= fold_builtin_3 (fndecl
, args
[0], args
[1], args
[2], ignore
);
10102 ret
= fold_builtin_4 (fndecl
, args
[0], args
[1], args
[2], args
[3],
10110 ret
= build1 (NOP_EXPR
, GENERIC_TREE_TYPE (ret
), ret
);
10111 TREE_NO_WARNING (ret
) = 1;
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
   result of the function call is ignored.  */

static tree
fold_builtin_varargs (tree fndecl, tree exp, bool ignore ATTRIBUTE_UNUSED)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      ret = fold_builtin_sprintf_chk (exp, fcode);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      ret = fold_builtin_snprintf_chk (exp, NULL_TREE, fcode);
      break;

    default:
      break;
    }

  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }

  return NULL_TREE;
}
10153 /* A wrapper function for builtin folding that prevents warnings for
10154 "statement without effect" and the like, caused by removing the
10155 call node earlier than the warning is generated. */
10158 fold_call_expr (tree exp
, bool ignore
)
10160 tree ret
= NULL_TREE
;
10161 tree fndecl
= get_callee_fndecl (exp
);
10163 && TREE_CODE (fndecl
) == FUNCTION_DECL
10164 && DECL_BUILT_IN (fndecl
))
10166 /* FIXME: Don't use a list in this interface. */
10167 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10168 return targetm
.fold_builtin (fndecl
, CALL_EXPR_ARGS (exp
), ignore
);
10171 int nargs
= call_expr_nargs (exp
);
10172 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10174 tree
*args
= CALL_EXPR_ARGP (exp
);
10175 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
10178 ret
= fold_builtin_varargs (fndecl
, exp
, ignore
);
10181 /* Propagate location information from original call to
10182 expansion of builtin. Otherwise things like
10183 maybe_emit_chk_warning, that operate on the expansion
10184 of a builtin, will use the wrong location information. */
10185 if (CAN_HAVE_LOCATION_P (exp
) && EXPR_HAS_LOCATION (exp
))
10187 tree realret
= ret
;
10188 if (TREE_CODE (ret
) == NOP_EXPR
)
10189 realret
= TREE_OPERAND (ret
, 0);
10190 if (CAN_HAVE_LOCATION_P (realret
)
10191 && !EXPR_HAS_LOCATION (realret
))
10192 SET_EXPR_LOCATION (realret
, EXPR_LOCATION (exp
));
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and ARGLIST is a TREE_LIST of arguments.  */

build_function_call_expr (tree fndecl, tree arglist)

  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  int n = list_length (arglist);
  tree *argarray = (tree *) alloca (n * sizeof (tree));

  for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
    argarray[i] = TREE_VALUE (arglist);
  return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  */

build_call_expr (tree fndecl, int n, ...)

  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *argarray = (tree *) alloca (n * sizeof (tree));

  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);

  return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
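/* Usage sketch (illustrative, not part of the original source): the
   string folders below build their replacement calls through this
   helper, e.g.

     fn = implicit_built_in_decls[BUILT_IN_STRCHR];
     call = build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, 'c'));

   which produces a CALL_EXPR equivalent to strchr (s1, 'c') and runs it
   back through the builtin folders via fold_builtin_call_array.  */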
/* Construct a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  */

fold_builtin_call_array (tree type, tree fn, int n, tree *argarray)

  tree ret = NULL_TREE;

  if (TREE_CODE (fn) == ADDR_EXPR)

      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))

          if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)

              tree arglist = NULL_TREE;
              for (i = n - 1; i >= 0; i--)
                arglist = tree_cons (NULL_TREE, argarray[i], arglist);
              ret = targetm.fold_builtin (fndecl, arglist, false);

          else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)

              /* First try the transformations that don't require consing up
                 an exp.  */
              ret = fold_builtin_n (fndecl, argarray, n, false);

          /* If we got this far, we need to build an exp.  */
          exp = build_call_array (type, fn, n, argarray);
          ret = fold_builtin_varargs (fndecl, exp, false);
          return ret ? ret : exp;

  return build_call_array (type, fn, n, argarray);
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)

  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

      buffer = alloca (nargs * sizeof (tree));

      for (i = 0; i < n; i++)
        buffer[i] = va_arg (ap, tree);

      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = CALL_EXPR_ARG (exp, j);

    buffer = CALL_EXPR_ARGP (exp) + skip;

  return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */

validate_arg (tree arg, enum tree_code code)

  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

validate_arglist (tree callexpr, ...)

  enum tree_code code;

  call_expr_arg_iterator iter;

  va_start (ap, callexpr);
  init_call_expr_arg_iterator (callexpr, &iter);

      code = va_arg (ap, enum tree_code);

          /* This signifies an ellipsis; any further arguments are all ok.  */

          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_call_expr_args_p (&iter);

          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_call_expr_arg (&iter);
          if (!validate_arg (arg, code))

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
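/* Usage sketch (illustrative): a specifier list is terminated either by
   VOID_TYPE (exact arity) or by 0 (trailing ellipsis), e.g.

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a (pointer, integer) argument list, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts a pointer followed by any further arguments.  */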
/* Default target-specific builtin expander that does nothing.  */

default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

readonly_data_expr (tree exp)

  if (TREE_CODE (exp) != ADDR_EXPR)

  exp = get_base_address (TREE_OPERAND (exp, 0));

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     handle).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
/* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

fold_builtin_strstr (tree s1, tree s2, tree type)

  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))

      const char *p1, *p2;

      p2 = c_getstr (s2);

      p1 = c_getstr (s1);

          const char *r = strstr (p1, p2);

            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
                             s1, build_int_cst (TREE_TYPE (s1), r - p1));
          return fold_convert (type, tem);

          /* The argument is const char *, and the result is char *, so we need
             a type conversion here to avoid a warning.  */
          return fold_convert (type, s1);

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
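/* Example of the transformations above (illustrative): with constant
   arguments the call is evaluated at compile time,

     strstr ("abcde", "cd")  -->  "abcde" + 2
     strstr (s, "")          -->  (char *) s

   and a single-character needle is rewritten as

     strstr (s, "c")         -->  strchr (s, 'c')  */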
/* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

fold_builtin_strchr (tree s1, tree s2, tree type)

  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))

      if (TREE_CODE (s2) != INTEGER_CST)

      p1 = c_getstr (s1);

          if (target_char_cast (s2, &c))

          r = strchr (p1, c);

            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
                             s1, build_int_cst (TREE_TYPE (s1), r - p1));
          return fold_convert (type, tem);
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

fold_builtin_strrchr (tree s1, tree s2, tree type)

  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))

      if (TREE_CODE (s2) != INTEGER_CST)

      p1 = c_getstr (s1);

          if (target_char_cast (s2, &c))

          r = strrchr (p1, c);

            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
                             s1, build_int_cst (TREE_TYPE (s1), r - p1));
          return fold_convert (type, tem);

      if (! integer_zerop (s2))

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr (fn, 2, s1, s2);
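/* Example (illustrative): with a constant haystack the result is an
   offset into it, e.g. strrchr ("abcb", 'b') --> "abcb" + 3, and
   searching for the terminating NUL degenerates to

     strrchr (s, '\0')  -->  strchr (s, '\0')

   since both return a pointer to the end of the string.  */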
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

fold_builtin_strpbrk (tree s1, tree s2, tree type)

  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))

      const char *p1, *p2;

      p2 = c_getstr (s2);

      p1 = c_getstr (s1);

          const char *r = strpbrk (p1, p2);

            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
                             s1, build_int_cst (TREE_TYPE (s1), r - p1));
          return fold_convert (type, tem);

        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);

        return NULL_TREE;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr (fn, 2, s1, build_int_cst (NULL_TREE, p2[0]));
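/* Example (illustrative): strpbrk ("abcd", "cx") --> "abcd" + 2 when both
   arguments are constant, strpbrk (s, "") --> NULL (with s still evaluated
   for its side effects), and a single-character set becomes

     strpbrk (s, "c")  -->  strchr (s, 'c')  */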
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

fold_builtin_strcat (tree dst, tree src)

  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE))

      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')

/* Simplify a call to the strncat builtin.  DST, SRC, and LEN are the
   arguments to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

fold_builtin_strncat (tree dst, tree src, tree len)

  if (!validate_arg (dst, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))

      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
         length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
        return omit_two_operands (TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
         length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
          && compare_tree_int (len, strlen (p)) >= 0)

          tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */

          return build_call_expr (fn, 2, dst, src);
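/* Example (illustrative): strncat (d, s, 0) and strncat (d, "", n) both
   fold to d (the other operands are kept for their side effects), and
   when the bound is known to cover the whole source string,

     strncat (d, "abc", 5)  -->  strcat (d, "abc")

   since at most strlen ("abc") characters are copied anyway.  */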
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

fold_builtin_strspn (tree s1, tree s2)

  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))

      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */

          const size_t r = strspn (p1, p2);
          return size_int (r);

      /* If either argument is "", return NULL_TREE.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands (integer_type_node, integer_zero_node,
                                  s1, s2);

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

fold_builtin_strcspn (tree s1, tree s2)

  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))

      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */

          const size_t r = strcspn (p1, p2);
          return size_int (r);

      /* If the first argument is "", return NULL_TREE.  */
      if (p1 && *p1 == '\0')

          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand (integer_type_node,
                                   integer_zero_node, s2);

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')

          tree fn = implicit_built_in_decls[BUILT_IN_STRLEN];

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */

          return build_call_expr (fn, 1, s1);
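/* Example (illustrative): with constant arguments both builtins are
   evaluated at compile time, e.g. strspn ("aab", "a") --> 2 and
   strcspn ("aab", "b") --> 2; strcspn (s, "") is rewritten as
   __builtin_strlen (s), and strspn (s, "") folds to 0.  */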
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is actually
   a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   was possible.  */

fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree const fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FWRITE];

  /* If the return value is used, don't do the transformation.  */

  /* Verify the arguments in the original call.  */
  if (!validate_arg (arg0, POINTER_TYPE)
      || !validate_arg (arg1, POINTER_TYPE))

    len = c_strlen (arg0, 0);

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  if (!len
      || TREE_CODE (len) != INTEGER_CST)

  switch (compare_tree_int (len, 1))

    case -1: /* length is 0, delete the call entirely.  */
      return omit_one_operand (integer_type_node, integer_zero_node, arg1);

    case 0: /* length is 1, call fputc.  */

        const char *p = c_getstr (arg0);

            return build_call_expr (fn_fputc, 2,
                                    build_int_cst (NULL_TREE, p[0]), arg1);

    case 1: /* length is greater than 1, call fwrite.  */

      /* If optimizing for size keep fputs.  */

      /* New argument list transforming fputs(string, stream) to
         fwrite(string, 1, len, stream).  */

      return build_call_expr (fn_fwrite, 4, arg0, size_one_node, len, arg1);

      gcc_unreachable ();
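/* Example (illustrative): when the result is unused and the string
   length is known,

     fputs ("", f)       -->  (void) f            (call deleted)
     fputs ("x", f)      -->  fputc ('x', f)
     fputs ("hello", f)  -->  fwrite ("hello", 1, 5, f)

   with the last form skipped when optimizing for size.  */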
/* Fold the next_arg or va_start call EXP.  Returns true if there was an
   error produced, false otherwise.  This is done so that we don't output
   the error or warning twice or three times.  */

fold_builtin_next_arg (tree exp, bool va_start_p)

  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);

  if (TYPE_ARG_TYPES (fntype) == 0
      || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
          == void_type_node))

      error ("%<va_start%> used in function with fixed args");

  if (va_start_p && (nargs != 2))

      error ("wrong number of arguments to function %<va_start%>");

      arg = CALL_EXPR_ARG (exp, 1);

  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */

          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning (0, "%<__builtin_next_arg%> called without an argument");

      else if (nargs > 1)

          error ("wrong number of arguments to function %<__builtin_next_arg%>");

      arg = CALL_EXPR_ARG (exp, 0);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))

      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (TREE_CODE (arg) == NOP_EXPR
             || TREE_CODE (arg) == CONVERT_EXPR
             || TREE_CODE (arg) == NON_LVALUE_EXPR
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)

          /* FIXME: Sometimes with the tree optimizers we can get
             something other than the last argument even though the user
             used the last argument.  We just warn and set the arg to be
             the last argument so that we will get wrong-code because of
             it.  */
          warning (0, "second parameter of %<va_start%> not last named argument");

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); } */

    CALL_EXPR_ARG (exp, 1) = integer_zero_node;

    CALL_EXPR_ARG (exp, 0) = integer_zero_node;
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

fold_builtin_sprintf (tree dest, tree fmt, tree orig, int ignored)

  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (fmt, POINTER_TYPE))

  if (orig && !validate_arg (orig, POINTER_TYPE))

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)

  retval = NULL_TREE;

  if (!init_target_chars ())

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)

      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      /* Don't optimize sprintf (buf, "abc", ptr++).  */

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      call = build_call_expr (fn, 2, dest, fmt);

        retval = build_int_cst (NULL_TREE, strlen (fmt_str));

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)

      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      /* Don't crash on sprintf (str1, "%s").  */

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */

          retval = c_strlen (orig, 1);
          if (!retval || TREE_CODE (retval) != INTEGER_CST)

      call = build_call_expr (fn, 2, dest, orig);

  if (call && retval)

      retval = fold_convert
        (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
         retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
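/* Example (illustrative): for a literal format with no '%',

     sprintf (buf, "hello")     -->  strcpy (buf, "hello"), value 5

   and for the "%s" form with a known-length argument,

     sprintf (buf, "%s", "ab")  -->  strcpy (buf, "ab"), value 2

   where the value is only materialized (via a COMPOUND_EXPR) when the
   caller actually uses sprintf's return value.  */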
/* Expand a call EXP to __builtin_object_size.  */

expand_builtin_object_size (tree exp)

  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);
  location_t locus = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

      error ("%Hfirst argument of %D must be a pointer, second integer constant",
             &locus, fndecl);
      expand_builtin_trap ();

  ost = CALL_EXPR_ARG (exp, 1);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)

      error ("%Hlast argument of %D is not integer constant between 0 and 3",
             &locus, fndecl);
      expand_builtin_trap ();

  object_size_type = tree_low_cst (ost, 0);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
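/* Example (illustrative): when the object size pass could not compute
   anything, __builtin_object_size (p, 0) and (p, 1) expand to the
   "unknown" value (size_t) -1, while types 2 and 3 expand to 0, which is
   what the constm1_rtx / const0_rtx fallback above implements.  */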
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
                           enum built_in_function fcode)

  tree dest, src, len, size;

  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! host_integerp (size, 1))

  if (host_integerp (len, 1) || integer_all_onesp (size))

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))

          location_t locus = EXPR_LOCATION (exp);
          warning (0, "%Hcall to %D will always overflow destination buffer",
                   &locus, get_callee_fndecl (exp));

      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */

        case BUILT_IN_MEMCPY_CHK:
          fn = built_in_decls[BUILT_IN_MEMCPY];

        case BUILT_IN_MEMPCPY_CHK:
          fn = built_in_decls[BUILT_IN_MEMPCPY];

        case BUILT_IN_MEMMOVE_CHK:
          fn = built_in_decls[BUILT_IN_MEMMOVE];

        case BUILT_IN_MEMSET_CHK:
          fn = built_in_decls[BUILT_IN_MEMSET];

      fn = build_call_expr (fn, 3, dest, src, len);
      if (TREE_CODE (fn) == CALL_EXPR)
        CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);

  else if (fcode == BUILT_IN_MEMSET_CHK)

      unsigned int dest_align
        = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))

          if (fcode != BUILT_IN_MEMPCPY_CHK)

              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);

              len = fold_convert (TREE_TYPE (dest), len);
              expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
              return expand_expr (expr, target, mode, EXPAND_NORMAL);

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)

          unsigned int src_align
            = get_pointer_alignment (src, BIGGEST_ALIGNMENT);

          if (src_align == 0)

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))

              tree fn = built_in_decls[BUILT_IN_MEMCPY_CHK];

              fn = build_call_expr (fn, 4, dest, src, len, size);
              if (TREE_CODE (fn) == CALL_EXPR)
                CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
/* Emit warning if a buffer overflow is detected at compile time.  */

maybe_emit_chk_warning (tree exp, enum built_in_function fcode)

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);

    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);

      gcc_unreachable ();

  if (! host_integerp (size, 1) || integer_all_onesp (size))

      len = c_strlen (len, 1);
      if (! len || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))

  else if (fcode == BUILT_IN_STRNCAT_CHK)

      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! host_integerp (len, 1) || tree_int_cst_lt (len, size))

      src = c_strlen (src, 1);
      if (! src || ! host_integerp (src, 1))

          locus = EXPR_LOCATION (exp);
          warning (0, "%Hcall to %D might overflow destination buffer",
                   &locus, get_callee_fndecl (exp));

      else if (tree_int_cst_lt (src, size))

  else if (! host_integerp (len, 1) || ! tree_int_cst_lt (size, len))

  locus = EXPR_LOCATION (exp);
  warning (0, "%Hcall to %D will always overflow destination buffer",
           &locus, get_callee_fndecl (exp));
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)

  tree dest, size, len, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  dest = CALL_EXPR_ARG (exp, 0);
  flag = CALL_EXPR_ARG (exp, 1);
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! host_integerp (size, 1) || integer_all_onesp (size))

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)

  if (!init_target_chars ())

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know the size too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)

          arg = CALL_EXPR_ARG (exp, 4);
          if (! POINTER_TYPE_P (TREE_TYPE (arg)))

          len = c_strlen (arg, 1);
          if (!len || ! host_integerp (len, 1))

  if (! tree_int_cst_lt (len, size))

      location_t locus = EXPR_LOCATION (exp);
      warning (0, "%Hcall to %D will always overflow destination buffer",
               &locus, get_callee_fndecl (exp));
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

fold_builtin_object_size (tree ptr, tree ost)

  tree ret = NULL_TREE;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)

  object_size_type = tree_low_cst (ost, 0);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    ret = build_int_cstu (size_type_node,
                          compute_builtin_object_size (ptr, object_size_type));

  else if (TREE_CODE (ptr) == SSA_NAME)

      unsigned HOST_WIDE_INT bytes;

      /* If object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determining
         it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2
                                             ? -1 : 0))
        ret = build_int_cstu (size_type_node, bytes);

      unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (ret);
      HOST_WIDE_INT high = TREE_INT_CST_HIGH (ret);
      if (fit_double_type (low, high, &low, &high, TREE_TYPE (ret)))
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

fold_builtin_memory_chk (tree fndecl,
                         tree dest, tree src, tree len, tree size,
                         tree maxlen, bool ignore,
                         enum built_in_function fcode)

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src,
                        (fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE))
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))

      if (fcode != BUILT_IN_MEMPCPY_CHK)
        return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);

          tree temp = fold_convert (TREE_TYPE (dest), len);
          temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
          return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);

  if (! host_integerp (size, 1))

  if (! integer_all_onesp (size))

      if (! host_integerp (len, 1))

          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))

              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)

                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = built_in_decls[BUILT_IN_MEMCPY_CHK];

                  return build_call_expr (fn, 4, dest, src, len, size);

      if (tree_int_cst_lt (size, maxlen))

  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */

    case BUILT_IN_MEMCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMCPY];

    case BUILT_IN_MEMPCPY_CHK:
      fn = built_in_decls[BUILT_IN_MEMPCPY];

    case BUILT_IN_MEMMOVE_CHK:
      fn = built_in_decls[BUILT_IN_MEMMOVE];

    case BUILT_IN_MEMSET_CHK:
      fn = built_in_decls[BUILT_IN_MEMSET];

  return build_call_expr (fn, 3, dest, src, len);
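/* Example (illustrative): once the length is known to fit the object,

     __memcpy_chk (d, s, 10, 64)  -->  memcpy (d, s, 10)

   while (void) __mempcpy_chk (d, s, n, sz) with an unknown n becomes
   __memcpy_chk (d, s, n, sz), since the returned end pointer is not
   needed; calls that cannot be proven safe are left alone.  */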
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

fold_builtin_stxcpy_chk (tree fndecl, tree dest, tree src, tree size,
                         tree maxlen, bool ignore,
                         enum built_in_function fcode)

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  if (! host_integerp (size, 1))

  if (! integer_all_onesp (size))

      len = c_strlen (src, 1);
      if (! len || ! host_integerp (len, 1))

          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))

              if (fcode == BUILT_IN_STPCPY_CHK)

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = built_in_decls[BUILT_IN_STRCPY_CHK];

                  return build_call_expr (fn, 3, dest, src, size);

              if (! len || TREE_SIDE_EFFECTS (len))

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = built_in_decls[BUILT_IN_MEMCPY_CHK];

              len = size_binop (PLUS_EXPR, len, ssize_int (1));
              return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
                                   build_call_expr (fn, 4,
                                                    dest, src, len, size));

      if (! tree_int_cst_lt (maxlen, size))

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = built_in_decls[fcode == BUILT_IN_STPCPY_CHK
                      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY];

  return build_call_expr (fn, 2, dest, src);
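/* Example (illustrative): with a constant source that is known to fit,

     __strcpy_chk (d, "abc", 16)  -->  strcpy (d, "abc")

   and when c_strlen only proves the length is some non-constant LEN,
   the call is turned into __memcpy_chk (d, src, LEN + 1, 16) instead,
   so the runtime check is preserved while avoiding the strlen.  */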
/* Fold a call to the __strncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  */

fold_builtin_strncpy_chk (tree dest, tree src, tree len, tree size,
                          tree maxlen)

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))

  if (! host_integerp (size, 1))

  if (! integer_all_onesp (size))

      if (! host_integerp (len, 1))

          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))

      if (tree_int_cst_lt (size, maxlen))

  /* If __builtin_strncpy_chk is used, assume strncpy is available.  */
  fn = built_in_decls[BUILT_IN_STRNCPY];

  return build_call_expr (fn, 3, dest, src, len);

/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

fold_builtin_strcat_chk (tree fndecl, tree dest, tree src, tree size)

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  if (! host_integerp (size, 1) || ! integer_all_onesp (size))

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = built_in_decls[BUILT_IN_STRCAT];

  return build_call_expr (fn, 2, dest, src);

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

fold_builtin_strncat_chk (tree fndecl,
                          tree dest, tree src, tree len, tree size)

  if (!validate_arg (dest, POINTER_TYPE)
      || !validate_arg (src, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE)
      || !validate_arg (size, INTEGER_TYPE))

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if (p && *p == '\0')
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
  else if (integer_zerop (len))
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  if (! host_integerp (size, 1))

  if (! integer_all_onesp (size))

      tree src_len = c_strlen (src, 1);
      if (src_len
          && host_integerp (src_len, 1)
          && host_integerp (len, 1)
          && ! tree_int_cst_lt (len, src_len))

          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = built_in_decls[BUILT_IN_STRCAT_CHK];

          return build_call_expr (fn, 3, dest, src, size);

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = built_in_decls[BUILT_IN_STRNCAT];

  return build_call_expr (fn, 3, dest, src, len);
/* Fold a call EXP to __{,v}sprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

fold_builtin_sprintf_chk (tree exp, enum built_in_function fcode)

  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))

  flag = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (flag, INTEGER_TYPE))

  size = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (size, INTEGER_TYPE))

  fmt = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (fmt, POINTER_TYPE))

  if (! host_integerp (size, 1))

  if (!init_target_chars ())

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)

      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)

          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));

      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)

              arg = CALL_EXPR_ARG (exp, 4);
              if (validate_arg (arg, POINTER_TYPE))

                  len = c_strlen (arg, 1);
                  if (! len || ! host_integerp (len, 1))

  if (! integer_all_onesp (size))

      if (! len || ! tree_int_cst_lt (len, size))

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))

      if (fmt_str == NULL)

      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSPRINTF_CHK
                      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF];

  return rewrite_call_expr (exp, 4, fn, 2, dest, fmt);
/* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
   a normal call should be emitted rather than expanding the function
   inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

fold_builtin_snprintf_chk (tree exp, tree maxlen,
                           enum built_in_function fcode)

  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (call_expr_nargs (exp) < 5)

  dest = CALL_EXPR_ARG (exp, 0);
  if (!validate_arg (dest, POINTER_TYPE))

  len = CALL_EXPR_ARG (exp, 1);
  if (!validate_arg (len, INTEGER_TYPE))

  flag = CALL_EXPR_ARG (exp, 2);
  if (!validate_arg (flag, INTEGER_TYPE))

  size = CALL_EXPR_ARG (exp, 3);
  if (!validate_arg (size, INTEGER_TYPE))

  fmt = CALL_EXPR_ARG (exp, 4);
  if (!validate_arg (fmt, POINTER_TYPE))

  if (! host_integerp (size, 1))

  if (! integer_all_onesp (size))

      if (! host_integerp (len, 1))

          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! host_integerp (maxlen, 1))

      if (tree_int_cst_lt (size, maxlen))

  if (!init_target_chars ())

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))

      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)

      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = built_in_decls[fcode == BUILT_IN_VSNPRINTF_CHK
                      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF];

  return rewrite_call_expr (exp, 5, fn, 3, dest, len, fmt);
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

fold_builtin_printf (tree fndecl, tree fmt, tree arg, bool ignore,
                     enum built_in_function fcode)

  tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fmt, POINTER_TYPE))

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)

      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED];
      fn_puts = built_in_decls[BUILT_IN_PUTS_UNLOCKED];

      fn_putchar = implicit_built_in_decls[BUILT_IN_PUTCHAR];
      fn_puts = implicit_built_in_decls[BUILT_IN_PUTS];

  if (!init_target_chars ())

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)

      if (strcmp (fmt_str, target_percent_s) == 0)

          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)

          if (!arg || !validate_arg (arg, POINTER_TYPE))

          str = c_getstr (arg);

          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')

          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (NULL_TREE, str[0]);

          call = build_call_expr (fn_putchar, 1, newarg);

          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline)

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              char *newstr = alloca (len);
              memcpy (newstr, str, len - 1);
              newstr[len - 1] = 0;

              newarg = build_string_literal (len, newstr);

              call = build_call_expr (fn_puts, 1, newarg);

            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)

      if (!arg || !validate_arg (arg, POINTER_TYPE))

      call = build_call_expr (fn_puts, 1, arg);

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)

      if (!arg || !validate_arg (arg, INTEGER_TYPE))

      call = build_call_expr (fn_putchar, 1, arg);

  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
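/* Example (illustrative): for literal formats,

     printf ("")         -->  0              (call deleted)
     printf ("x")        -->  putchar ('x')
     printf ("hi\n")     -->  puts ("hi")
     printf ("%s\n", s)  -->  puts (s)
     printf ("%c", c)    -->  putchar (c)

   all of this only when the return value of printf is ignored.  */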
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

fold_builtin_fprintf (tree fndecl, tree fp, tree fmt, tree arg, bool ignore,
                      enum built_in_function fcode)

  tree fn_fputc, fn_fputs, call = NULL_TREE;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */

  /* Verify the required arguments in the original call.  */
  if (!validate_arg (fp, POINTER_TYPE))

  if (!validate_arg (fmt, POINTER_TYPE))

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)

      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = built_in_decls[BUILT_IN_FPUTC_UNLOCKED];
      fn_fputs = built_in_decls[BUILT_IN_FPUTS_UNLOCKED];

      fn_fputc = implicit_built_in_decls[BUILT_IN_FPUTC];
      fn_fputs = implicit_built_in_decls[BUILT_IN_FPUTS];

  if (!init_target_chars ())

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)

      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')

          /* If FP has side-effects, just wait until gimplification is
             done anyway.  */
          if (TREE_SIDE_EFFECTS (fp))

          return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */

        call = build_call_expr (fn_fputs, 2, fmt, fp);

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)

      if (!arg || !validate_arg (arg, POINTER_TYPE))

      call = build_call_expr (fn_fputs, 2, arg, fp);

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)

      if (!arg || !validate_arg (arg, INTEGER_TYPE))

      call = build_call_expr (fn_fputc, 2, arg, fp);

  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), call);
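/* Example (illustrative): the same idea for the stream variants,

     fprintf (fp, "")        -->  0   (when fp has no side effects)
     fprintf (fp, "hello")   -->  fputs ("hello", fp)
     fprintf (fp, "%s", s)   -->  fputs (s, fp)
     fprintf (fp, "%c", c)   -->  fputc (c, fp)

   again only when fprintf's return value is ignored.  */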
/* Initialize format string characters in the target charset.  */

init_target_chars (void)

      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)

  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))

      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (!real_isnan (&rr) && !real_isinf (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))

          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
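/* Example (illustrative): do_mpfr_arg1 below relies on this check when
   folding, say, __builtin_sin (1.0) at compile time: the argument is
   converted to an mpfr_t at the precision of the result type, mpfr_sin
   is applied, and the value is accepted only if it round-trips through
   REAL_VALUE_TYPE and the target mode without losing anything (and,
   under -frounding-math, only if the MPFR result was exact).  */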
/* If argument ARG is a REAL_CST, call the one-argument mpfr function
   FUNC on it and return the resulting value as a tree with type TYPE.
   If MIN and/or MAX are not NULL, then the supplied ARG must be
   within those bounds.  If INCLUSIVE is true, then MIN/MAX are
   acceptable values, otherwise they are not.  The mpfr precision is
   set to the precision of TYPE.  We assume that function FUNC returns
   zero if the result could be calculated exactly within the requested
   precision.  */

static tree
do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
	      const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
	      bool inclusive)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (!real_isnan (ra) && !real_isinf (ra)
	  && (!min || real_compare (inclusive ? GE_EXPR : GT_EXPR, ra, min))
	  && (!max || real_compare (inclusive ? LE_EXPR : LT_EXPR, ra, max)))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra);
	  mpfr_clear_flags ();
	  inexact = func (m, m, GMP_RNDN);
	  result = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	}
    }

  return result;
}
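/* A minimal usage sketch, assuming the builtin's operand is in arg0 and
   the call's result type in type (as in the fold_builtin_* routines):

     CASE_FLT_FN (BUILT_IN_SIN):
       if (validate_arg (arg0, REAL_TYPE))
	 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, false);
     break;

   Passing NULL for MIN and MAX skips the range check; a domain-limited
   function such as asin() would instead pass &dconstm1 and &dconst1 with
   INCLUSIVE set to true.  */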
/* If arguments ARG1 and ARG2 are both REAL_CSTs, call the two-argument
   mpfr function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of TYPE.
   We assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg2 (tree arg1, tree arg2, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);

      if (!real_isnan (ra1) && !real_isinf (ra1)
	  && !real_isnan (ra2) && !real_isinf (ra2))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m1, m2;

	  mpfr_inits2 (prec, m1, m2, NULL);
	  mpfr_from_real (m1, ra1);
	  mpfr_from_real (m2, ra2);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, GMP_RNDN);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, NULL);
	}
    }

  return result;
}
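/* A minimal usage sketch, assuming the operands are in arg0/arg1 and the
   result type in type: two-operand math builtins such as atan2() map
   directly onto do_mpfr_arg2, e.g.

     CASE_FLT_FN (BUILT_IN_ATAN2):
       if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
	 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
     break;
  */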
/* If arguments ARG1, ARG2 and ARG3 are all REAL_CSTs, call the
   three-argument mpfr function FUNC on them and return the resulting
   value as a tree with type TYPE.  The mpfr precision is set to the
   precision of TYPE.  We assume that function FUNC returns zero if the
   result could be calculated exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr,
			  mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (!real_isnan (ra1) && !real_isinf (ra1)
	  && !real_isnan (ra2) && !real_isinf (ra2)
	  && !real_isnan (ra3) && !real_isinf (ra3))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1);
	  mpfr_from_real (m2, ra2);
	  mpfr_from_real (m3, ra3);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, GMP_RNDN);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
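/* A minimal usage sketch, assuming the operands are in arg0/arg1/arg2 and
   the result type in type: the natural client of do_mpfr_arg3 is fma(),
   folded roughly as

     CASE_FLT_FN (BUILT_IN_FMA):
       if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE)
	   && validate_arg (arg2, REAL_TYPE))
	 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
     break;
  */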
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (!real_isnan (ra) && !real_isinf (ra))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return in a complex value, do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer types were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));