/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "tree-gimple.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "typeclass.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "value-prof.h"

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];

/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove_args (tree, tree, tree, tree, rtx,
                                        enum machine_mode, int);
static rtx expand_builtin_bcopy (tree, int);
static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_expect (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (tree, int, tree, int, ...);
static bool validate_arg (tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree, tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_cosh (tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (tree, tree, tree, tree, bool, int);
static tree fold_builtin_strchr (tree, tree, tree);
static tree fold_builtin_memcmp (tree, tree, tree);
static tree fold_builtin_strcmp (tree, tree);
static tree fold_builtin_strncmp (tree, tree, tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_n (tree, tree *, int, bool);
static tree fold_builtin_0 (tree, bool);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_2 (tree, tree, tree, bool);
static tree fold_builtin_3 (tree, tree, tree, tree, bool);
static tree fold_builtin_4 (tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (tree, tree, bool);

static tree fold_builtin_strpbrk (tree, tree, tree);
static tree fold_builtin_strstr (tree, tree, tree);
static tree fold_builtin_strrchr (tree, tree, tree);
static tree fold_builtin_strcat (tree, tree);
static tree fold_builtin_strncat (tree, tree, tree);
static tree fold_builtin_strspn (tree, tree);
static tree fold_builtin_strcspn (tree, tree);
static tree fold_builtin_sprintf (tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
static tree fold_builtin_printf (tree, tree, tree, bool, enum built_in_function);
static tree fold_builtin_fprintf (tree, tree, tree, tree, bool,
                                  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  /* We rely on TER to compute accurate alignment information.  */
  if (!(optimize && flag_tree_ter))

  if (!POINTER_TYPE_P (TREE_TYPE (exp)))

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  switch (TREE_CODE (exp))

    case NON_LVALUE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      if (! POINTER_TYPE_P (TREE_TYPE (exp)))

      inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      align = MIN (inner, max_align);

      /* If sum of pointer + int, restrict our maximum alignment to that
         imposed by the integer.  If not, we can't do any better than
         ALIGN.  */
      if (! host_integerp (TREE_OPERAND (exp, 1), 1))

      while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
              & (max_align / BITS_PER_UNIT - 1))

      exp = TREE_OPERAND (exp, 0);

      /* See what we are pointing at and look at its alignment.  */
      exp = TREE_OPERAND (exp, 0);

      if (handled_component_p (exp))

          HOST_WIDE_INT bitsize, bitpos;
          enum machine_mode mode;
          int unsignedp, volatilep;

          exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                     &mode, &unsignedp, &volatilep, true);

          inner = MIN (inner, (unsigned) (bitpos & -bitpos));
          if (offset && TREE_CODE (offset) == PLUS_EXPR
              && host_integerp (TREE_OPERAND (offset, 1), 1))

              /* Any overflow in calculating offset_bits won't change
                 the alignment.  */
                = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)

              inner = MIN (inner, (offset_bits & -offset_bits));
              offset = TREE_OPERAND (offset, 0);

          if (offset && TREE_CODE (offset) == MULT_EXPR
              && host_integerp (TREE_OPERAND (offset, 1), 1))

              /* Any overflow in calculating offset_factor won't change
                 the alignment.  */
              unsigned offset_factor
                = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)

              inner = MIN (inner, (offset_factor & -offset_factor));

          inner = MIN (inner, BITS_PER_UNIT);

      if (TREE_CODE (exp) == FUNCTION_DECL)
        align = FUNCTION_BOUNDARY;
      else if (DECL_P (exp))
        align = MIN (inner, DECL_ALIGN (exp));
#ifdef CONSTANT_ALIGNMENT
      else if (CONSTANT_CLASS_P (exp))
        align = MIN (inner, (unsigned) CONSTANT_ALIGNMENT (exp, align));
#endif
      else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
               || TREE_CODE (exp) == INDIRECT_REF)
        align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);

      align = MIN (align, inner);
      return MIN (align, max_align);
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

c_strlen (tree src, int only_value)
{
  HOST_WIDE_INT offset;

  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)

      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      for (i = 0; i < max; i++)

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
  else if (! host_integerp (offset_node, 0))

  offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    warning (0, "offset outside bounds of constant string");

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

  src = string_constant (src, &offset_node);

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
           || compare_tree_int (offset_node,
                                TREE_STRING_LENGTH (src) - 1) > 0)

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

c_readstr (const char *str, enum machine_mode mode)
{
  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  for (i = 0; i < GET_MODE_SIZE (mode); i++)

      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;

      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);

  return immed_double_const (c[0], c[1], mode);
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to
   by P.  */

target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
          || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))

  return save_expr (exp);
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;

      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      current_function_accesses_prior_frames = 1;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)

      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);

      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);

  tem = memory_address (Pmode,
                        plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
/* Alias set used for setjmp buffer.  */
static HOST_WIDE_INT setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  current_function_calls_setjmp = 1;

  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.  */

expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)

      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
      /* This might change the hard frame pointer in ways that aren't
         apparent to early optimization passes, so force a clobber.  */
      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])

#ifdef ELIMINABLE_REGS
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)

      if (i == ARRAY_SIZE (elim_regs))

          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (virtual_incoming_args_rtx,
                          copy_to_reg (get_arg_pointer_save_area (cfun)));

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));

      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (gen_nonlocal_goto (value, lab, stack, fp));

          lab = copy_to_reg (lab);

          emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                      gen_rtx_MEM (BLKmode,
                                                   gen_rtx_SCRATCH (VOIDmode))));
          emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                      gen_rtx_MEM (BLKmode,
                                                   hard_frame_pointer_rtx)));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

          emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
          emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
          emit_indirect_jump (lab);

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))

      gcc_assert (insn != last);

          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,

      else if (CALL_P (insn))
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  current_function_has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));

      r_label = copy_to_reg (r_label);

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
         This sets the actual hard register used for the frame pointer
         to the location of the function's incoming static chain info.
         The non-local goto handler will then adjust it to contain the
         proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
      emit_indirect_jump (r_label);

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))

          REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
                                              const0_rtx, REG_NOTES (insn));

      else if (CALL_P (insn))
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

    = gen_rtx_MEM (sa_mode,
                   plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

  emit_insn (gen_setjmp ());

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     temporal locality).  */
  nargs = call_expr_nargs (exp);
    arg1 = CALL_EXPR_ARG (exp, 1);
    arg1 = integer_zero_node;
    arg2 = CALL_EXPR_ARG (exp, 2);
    arg2 = build_int_cst (NULL_TREE, 3);

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");

#ifdef HAVE_prefetch
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
           insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
          || (GET_MODE (op0) != Pmode))
          op0 = convert_memory_address (Pmode, op0);
          op0 = force_reg (Pmode, op0);
      emit_insn (gen_prefetch (op0, op1, op2));

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

get_memory_rtx (tree exp, tree len)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
          || TREE_CODE (exp) == NON_LVALUE_EXPR)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */

  set_mem_attributes (mem, exp, 0);

  /* Allow the string and memory builtins to overflow from one
     field into another, see http://gcc.gnu.org/PR23561.
     Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
     memory accessed by the string or memory builtin will fit
     within the field.  */
  if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)

      tree mem_expr = MEM_EXPR (mem);
      HOST_WIDE_INT offset = -1, length = -1;

      while (TREE_CODE (inner) == ARRAY_REF
             || TREE_CODE (inner) == NOP_EXPR
             || TREE_CODE (inner) == CONVERT_EXPR
             || TREE_CODE (inner) == NON_LVALUE_EXPR
             || TREE_CODE (inner) == VIEW_CONVERT_EXPR
             || TREE_CODE (inner) == SAVE_EXPR)
        inner = TREE_OPERAND (inner, 0);

      gcc_assert (TREE_CODE (inner) == COMPONENT_REF);

      if (MEM_OFFSET (mem)
          && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
        offset = INTVAL (MEM_OFFSET (mem));

      if (offset >= 0 && len && host_integerp (len, 0))
        length = tree_low_cst (len, 0);

      while (TREE_CODE (inner) == COMPONENT_REF)

          tree field = TREE_OPERAND (inner, 1);
          gcc_assert (! DECL_BIT_FIELD (field));
          gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
          gcc_assert (field == TREE_OPERAND (mem_expr, 1));

              && TYPE_SIZE_UNIT (TREE_TYPE (inner))
              && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))

                = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
              /* If we can prove the memory starting at XEXP (mem, 0)
                 and ending at XEXP (mem, 0) + LENGTH will fit into
                 this field, we can keep that COMPONENT_REF in MEM_EXPR.  */

                  && offset + length <= size)

              && host_integerp (DECL_FIELD_OFFSET (field), 0))
            offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
                      + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)

          mem_expr = TREE_OPERAND (mem_expr, 0);
          inner = TREE_OPERAND (inner, 0);

      if (mem_expr == NULL)

      if (mem_expr != MEM_EXPR (mem))

          set_mem_expr (mem, mem_expr);
          set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);

      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

apply_args_size (void)
{
  static int size = -1;
  enum machine_mode mode;

  /* The values computed by this function never change.  */

      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))

            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;

            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

apply_result_size (void)
{
  static int size = -1;
  enum machine_mode mode;

  /* The values computed by this function never change.  */

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))

            mode = reg_raw_mode[regno];

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;

            apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);

  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

expand_builtin_apply_args_1 (void)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (virtual_incoming_args_rtx);
#ifdef STACK_GROWS_DOWNWARD
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
    = force_operand (plus_constant (tem, current_function_pretend_args_size),

  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)

      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the
     call to the first insn of this function.  */

  temp = expand_builtin_apply_args_1 ();

  apply_args_value = temp;

  /* Put the insns after the NOTE that starts the function.
     If this is inside a start_sequence, make the outer-level insn
     chain current, so the code is placed at the start of the
     function.  */
  push_topmost_sequence ();
  emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
  pop_topmost_sequence ();
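/* Usage sketch for the untyped call/return builtins handled here and below
   (added illustration; not part of the original source).  A forwarding
   wrapper can capture its incoming argument registers, re-issue the call,
   and hand back whatever the callee returned without knowing its signature.
   `target_fn' and the argument-block size of 64 are hypothetical.  */
#if 0
extern double target_fn ();

double
forwarder ()
{
  /* Block holding the arg pointer, structure value address and the
     argument registers saved by expand_builtin_apply_args_1 above.  */
  void *args = __builtin_apply_args ();
  /* Re-issue the call with the same arguments; the last operand is the
     size of the pushed argument block.  */
  void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
  /* Propagate the callee's return value via an untyped return.  */
  __builtin_return (result);
}
#endif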
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
                                       incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);

    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  */
  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = virtual_outgoing_args_rtx;
#ifndef STACK_GROWS_DOWNWARD
  if (GET_CODE (argsize) == CONST_INT)
    dest = plus_constant (dest, -INTVAL (argsize));

    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));

  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)

        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg, adjust_address (arguments, mode, size));
        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);

      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
        use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));

#ifdef HAVE_call_value
  if (HAVE_call_value)

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)

            gcc_assert (!valreg); /* HAVE_untyped_call required.  */

            valreg = gen_rtx_REG (mode, regno);

      emit_call_insn (GEN_CALL_VALUE (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);

    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
1555 expand_builtin_return (rtx result
)
1557 int size
, align
, regno
;
1558 enum machine_mode mode
;
1560 rtx call_fusage
= 0;
1562 result
= convert_memory_address (Pmode
, result
);
1564 apply_result_size ();
1565 result
= gen_rtx_MEM (BLKmode
, result
);
1567 #ifdef HAVE_untyped_return
1568 if (HAVE_untyped_return
)
1570 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1576 /* Restore the return value and note that each value is used. */
1578 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1579 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1581 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1582 if (size
% align
!= 0)
1583 size
= CEIL (size
, align
) * align
;
1584 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1585 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1587 push_to_sequence (call_fusage
);
1588 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
1589 call_fusage
= get_insns ();
1591 size
+= GET_MODE_SIZE (mode
);
1594 /* Put the USE insns before the return. */
1595 emit_insn (call_fusage
);
1597 /* Return whatever values was restored by jumping directly to the end
1599 expand_naked_return ();
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))

    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
                                   ? string_type_class : array_type_class);
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
/* Expand a call EXP to __builtin_classify_type.  */

expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
    fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
    fcodel = BUILT_IN_MATHFN##L ; break;
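/* For instance (illustration added for clarity), CASE_MATHFN (BUILT_IN_SQRT)
   in the switch below expands to

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so a single table entry covers the double, float and long double
   variants of each math builtin.  */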
1649 /* Return mathematic function equivalent to FN but operating directly
1650 on TYPE, if available. If we can't do the conversion, return zero. */
1652 mathfn_built_in (tree type
, enum built_in_function fn
)
1654 enum built_in_function fcode
, fcodef
, fcodel
;
1658 CASE_MATHFN (BUILT_IN_ACOS
)
1659 CASE_MATHFN (BUILT_IN_ACOSH
)
1660 CASE_MATHFN (BUILT_IN_ASIN
)
1661 CASE_MATHFN (BUILT_IN_ASINH
)
1662 CASE_MATHFN (BUILT_IN_ATAN
)
1663 CASE_MATHFN (BUILT_IN_ATAN2
)
1664 CASE_MATHFN (BUILT_IN_ATANH
)
1665 CASE_MATHFN (BUILT_IN_CBRT
)
1666 CASE_MATHFN (BUILT_IN_CEIL
)
1667 CASE_MATHFN (BUILT_IN_CEXPI
)
1668 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1669 CASE_MATHFN (BUILT_IN_COS
)
1670 CASE_MATHFN (BUILT_IN_COSH
)
1671 CASE_MATHFN (BUILT_IN_DREM
)
1672 CASE_MATHFN (BUILT_IN_ERF
)
1673 CASE_MATHFN (BUILT_IN_ERFC
)
1674 CASE_MATHFN (BUILT_IN_EXP
)
1675 CASE_MATHFN (BUILT_IN_EXP10
)
1676 CASE_MATHFN (BUILT_IN_EXP2
)
1677 CASE_MATHFN (BUILT_IN_EXPM1
)
1678 CASE_MATHFN (BUILT_IN_FABS
)
1679 CASE_MATHFN (BUILT_IN_FDIM
)
1680 CASE_MATHFN (BUILT_IN_FLOOR
)
1681 CASE_MATHFN (BUILT_IN_FMA
)
1682 CASE_MATHFN (BUILT_IN_FMAX
)
1683 CASE_MATHFN (BUILT_IN_FMIN
)
1684 CASE_MATHFN (BUILT_IN_FMOD
)
1685 CASE_MATHFN (BUILT_IN_FREXP
)
1686 CASE_MATHFN (BUILT_IN_GAMMA
)
1687 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1688 CASE_MATHFN (BUILT_IN_HYPOT
)
1689 CASE_MATHFN (BUILT_IN_ILOGB
)
1690 CASE_MATHFN (BUILT_IN_INF
)
1691 CASE_MATHFN (BUILT_IN_ISINF
)
1692 CASE_MATHFN (BUILT_IN_J0
)
1693 CASE_MATHFN (BUILT_IN_J1
)
1694 CASE_MATHFN (BUILT_IN_JN
)
1695 CASE_MATHFN (BUILT_IN_LCEIL
)
1696 CASE_MATHFN (BUILT_IN_LDEXP
)
1697 CASE_MATHFN (BUILT_IN_LFLOOR
)
1698 CASE_MATHFN (BUILT_IN_LGAMMA
)
1699 CASE_MATHFN (BUILT_IN_LLCEIL
)
1700 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1701 CASE_MATHFN (BUILT_IN_LLRINT
)
1702 CASE_MATHFN (BUILT_IN_LLROUND
)
1703 CASE_MATHFN (BUILT_IN_LOG
)
1704 CASE_MATHFN (BUILT_IN_LOG10
)
1705 CASE_MATHFN (BUILT_IN_LOG1P
)
1706 CASE_MATHFN (BUILT_IN_LOG2
)
1707 CASE_MATHFN (BUILT_IN_LOGB
)
1708 CASE_MATHFN (BUILT_IN_LRINT
)
1709 CASE_MATHFN (BUILT_IN_LROUND
)
1710 CASE_MATHFN (BUILT_IN_MODF
)
1711 CASE_MATHFN (BUILT_IN_NAN
)
1712 CASE_MATHFN (BUILT_IN_NANS
)
1713 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1714 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1715 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1716 CASE_MATHFN (BUILT_IN_POW
)
1717 CASE_MATHFN (BUILT_IN_POWI
)
1718 CASE_MATHFN (BUILT_IN_POW10
)
1719 CASE_MATHFN (BUILT_IN_REMAINDER
)
1720 CASE_MATHFN (BUILT_IN_REMQUO
)
1721 CASE_MATHFN (BUILT_IN_RINT
)
1722 CASE_MATHFN (BUILT_IN_ROUND
)
1723 CASE_MATHFN (BUILT_IN_SCALB
)
1724 CASE_MATHFN (BUILT_IN_SCALBLN
)
1725 CASE_MATHFN (BUILT_IN_SCALBN
)
1726 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1727 CASE_MATHFN (BUILT_IN_SIN
)
1728 CASE_MATHFN (BUILT_IN_SINCOS
)
1729 CASE_MATHFN (BUILT_IN_SINH
)
1730 CASE_MATHFN (BUILT_IN_SQRT
)
1731 CASE_MATHFN (BUILT_IN_TAN
)
1732 CASE_MATHFN (BUILT_IN_TANH
)
1733 CASE_MATHFN (BUILT_IN_TGAMMA
)
1734 CASE_MATHFN (BUILT_IN_TRUNC
)
1735 CASE_MATHFN (BUILT_IN_Y0
)
1736 CASE_MATHFN (BUILT_IN_Y1
)
1737 CASE_MATHFN (BUILT_IN_YN
)
      default:
	return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    return implicit_built_in_decls[fcode];
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    return implicit_built_in_decls[fcodef];
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    return implicit_built_in_decls[fcodel];
  else
    return NULL_TREE;
}
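/* Usage sketch (not part of the original source): the expanders later in
   this file use this helper to pick the type-correct fallback decl, e.g.

     fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_FLOOR);

   which yields the decl for floor, floorf or floorl depending on the type
   of ARG, or NULL_TREE when no implicit declaration is available.  */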
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			   0, lab);

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
      emit_label (lab);
      return;
    }
#endif

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
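/* Conceptual sketch (not from the original source): the RTL emitted above
   behaves roughly like the following C, relying on the fact that only a
   NaN compares unequal to itself:

     if (result != result)
       errno = EDOM;

   The EQ comparison jumps to LAB, skipping the errno store, whenever the
   result is not a NaN; the non-throwing path stores TARGET_EDOM directly,
   otherwise the library call is re-issued so it can set errno itself.  */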
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns, before_call;
  tree fndecl = get_callee_fndecl (exp);
  enum machine_mode mode;
  bool errno_set = false;
  tree arg, narg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    default:
      gcc_unreachable ();
    }
  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available.  */
  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      target = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      narg = builtin_save_expr (arg);
      if (narg != arg)
	{
	  arg = narg;
	  exp = build_call_expr (fndecl, 1, arg);
	}

      op0 = expand_expr (arg, subtarget, VOIDmode, 0);

      start_sequence ();

      /* Compute into TARGET.
	 Set TARGET to wherever the result comes back.  */
      target = expand_unop (mode, builtin_optab, op0, target, 0);

      if (target != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, target);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return target;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  before_call = get_last_insn ();

  target = expand_call (exp, target, target == const0_rtx);

  /* If this is a sqrt operation and we don't care about errno, try to
     attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
     This allows the semantics of the libcall to be visible to the RTL
     optimizers.  */
  if (builtin_optab == sqrt_optab && !errno_set)
    {
      /* Search backwards through the insns emitted by expand_call looking
	 for the instruction with the REG_RETVAL note.  */
      rtx last = get_last_insn ();
      while (last != before_call)
	{
	  if (find_reg_note (last, REG_RETVAL, NULL))
	    {
	      rtx note = find_reg_note (last, REG_EQUAL, NULL);
	      /* Check that the REG_EQUAL note is an EXPR_LIST with
		 two elements, i.e. symbol_ref(sqrt) and the operand.  */
	      if (note
		  && GET_CODE (note) == EXPR_LIST
		  && GET_CODE (XEXP (note, 0)) == EXPR_LIST
		  && XEXP (XEXP (note, 0), 1) != NULL_RTX
		  && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
		{
		  rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
		  /* Check operand is a register with expected mode.  */
		  if (operand
		      && REG_P (operand)
		      && GET_MODE (operand) == mode)
		    {
		      /* Replace the REG_EQUAL note with a SQRT rtx.  */
		      rtx equiv = gen_rtx_SQRT (mode, operand);
		      set_unique_reg_note (last, REG_EQUAL, equiv);
		    }
		}
	      break;
	    }
	  last = PREV_INSN (last);
	}
    }

  return target;
}
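/* Illustrative note (not in the original source): the REG_EQUAL rewrite
   above turns the libcall's note, a two-element EXPR_LIST holding the
   symbol_ref for sqrt and its register operand, into a plain
   (sqrt:MODE (reg ...)) expression.  Later RTL passes can then treat the
   call result like any other SQRT rtx, for instance to common up two
   calls to sqrt on the same operand.  */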
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
1967 optab builtin_optab
;
1968 rtx op0
, op1
, insns
;
1969 int op1_type
= REAL_TYPE
;
1970 tree fndecl
= get_callee_fndecl (exp
);
1971 tree arg0
, arg1
, narg
;
1972 enum machine_mode mode
;
1973 bool errno_set
= true;
1976 switch (DECL_FUNCTION_CODE (fndecl
))
1978 CASE_FLT_FN (BUILT_IN_SCALBN
):
1979 CASE_FLT_FN (BUILT_IN_SCALBLN
):
1980 CASE_FLT_FN (BUILT_IN_LDEXP
):
1981 op1_type
= INTEGER_TYPE
;
1986 if (!validate_arglist (exp
, REAL_TYPE
, op1_type
, VOID_TYPE
))
1989 arg0
= CALL_EXPR_ARG (exp
, 0);
1990 arg1
= CALL_EXPR_ARG (exp
, 1);
1992 switch (DECL_FUNCTION_CODE (fndecl
))
1994 CASE_FLT_FN (BUILT_IN_POW
):
1995 builtin_optab
= pow_optab
; break;
1996 CASE_FLT_FN (BUILT_IN_ATAN2
):
1997 builtin_optab
= atan2_optab
; break;
1998 CASE_FLT_FN (BUILT_IN_SCALB
):
1999 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2001 builtin_optab
= scalb_optab
; break;
2002 CASE_FLT_FN (BUILT_IN_SCALBN
):
2003 CASE_FLT_FN (BUILT_IN_SCALBLN
):
2004 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp
)))->b
!= 2)
2006 /* Fall through... */
2007 CASE_FLT_FN (BUILT_IN_LDEXP
):
2008 builtin_optab
= ldexp_optab
; break;
2009 CASE_FLT_FN (BUILT_IN_FMOD
):
2010 builtin_optab
= fmod_optab
; break;
2011 CASE_FLT_FN (BUILT_IN_REMAINDER
):
2012 CASE_FLT_FN (BUILT_IN_DREM
):
2013 builtin_optab
= remainder_optab
; break;
2018 /* Make a suitable register to place result in. */
2019 mode
= TYPE_MODE (TREE_TYPE (exp
));
2021 /* Before working hard, check whether the instruction is available. */
2022 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2025 target
= gen_reg_rtx (mode
);
2027 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2030 /* Always stabilize the argument list. */
2031 narg
= builtin_save_expr (arg1
);
2037 narg
= builtin_save_expr (arg0
);
2045 exp
= build_call_expr (fndecl
, 2, arg0
, arg1
);
2047 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
2048 op1
= expand_normal (arg1
);
2052 /* Compute into TARGET.
2053 Set TARGET to wherever the result comes back. */
2054 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
2055 target
, 0, OPTAB_DIRECT
);
2057 /* If we were unable to expand via the builtin, stop the sequence
2058 (without outputting the insns) and call to the library function
2059 with the stabilized argument list. */
2063 return expand_call (exp
, target
, target
== const0_rtx
);
2067 expand_errno_check (exp
, target
);
2069 /* Output the entire sequence. */
2070 insns
= get_insns ();
2077 /* Expand a call to the builtin sin and cos math functions.
2078 Return NULL_RTX if a normal call should be emitted rather than expanding the
2079 function in-line. EXP is the expression that is a call to the builtin
2080 function; if convenient, the result should be placed in TARGET.
2081 SUBTARGET may be used as the target for computing one of EXP's
2085 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2087 optab builtin_optab
;
2089 tree fndecl
= get_callee_fndecl (exp
);
2090 enum machine_mode mode
;
2093 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2096 arg
= CALL_EXPR_ARG (exp
, 0);
2098 switch (DECL_FUNCTION_CODE (fndecl
))
2100 CASE_FLT_FN (BUILT_IN_SIN
):
2101 CASE_FLT_FN (BUILT_IN_COS
):
2102 builtin_optab
= sincos_optab
; break;
2107 /* Make a suitable register to place result in. */
2108 mode
= TYPE_MODE (TREE_TYPE (exp
));
  /* Check if sincos insn is available, otherwise fall back
     to sin or cos insn.  */
2112 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2113 switch (DECL_FUNCTION_CODE (fndecl
))
2115 CASE_FLT_FN (BUILT_IN_SIN
):
2116 builtin_optab
= sin_optab
; break;
2117 CASE_FLT_FN (BUILT_IN_COS
):
2118 builtin_optab
= cos_optab
; break;
2123 /* Before working hard, check whether the instruction is available. */
2124 if (builtin_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2126 target
= gen_reg_rtx (mode
);
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
2131 narg
= save_expr (arg
);
2135 exp
= build_call_expr (fndecl
, 1, arg
);
2138 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2142 /* Compute into TARGET.
2143 Set TARGET to wherever the result comes back. */
2144 if (builtin_optab
== sincos_optab
)
2148 switch (DECL_FUNCTION_CODE (fndecl
))
2150 CASE_FLT_FN (BUILT_IN_SIN
):
2151 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2153 CASE_FLT_FN (BUILT_IN_COS
):
2154 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2159 gcc_assert (result
);
2163 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2168 /* Output the entire sequence. */
2169 insns
= get_insns ();
2175 /* If we were unable to expand via the builtin, stop the sequence
2176 (without outputting the insns) and call to the library function
2177 with the stabilized argument list. */
2181 target
= expand_call (exp
, target
, target
== const0_rtx
);
2186 /* Expand a call to one of the builtin math functions that operate on
2187 floating point argument and output an integer result (ilogb, isinf,
2189 Return 0 if a normal call should be emitted rather than expanding the
2190 function in-line. EXP is the expression that is a call to the builtin
2191 function; if convenient, the result should be placed in TARGET.
2192 SUBTARGET may be used as the target for computing one of EXP's operands. */
2195 expand_builtin_interclass_mathfn (tree exp
, rtx target
, rtx subtarget
)
2197 optab builtin_optab
;
2198 enum insn_code icode
;
2200 tree fndecl
= get_callee_fndecl (exp
);
2201 enum machine_mode mode
;
2202 bool errno_set
= false;
2205 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2208 arg
= CALL_EXPR_ARG (exp
, 0);
2210 switch (DECL_FUNCTION_CODE (fndecl
))
2212 CASE_FLT_FN (BUILT_IN_ILOGB
):
2213 errno_set
= true; builtin_optab
= ilogb_optab
; break;
2214 CASE_FLT_FN (BUILT_IN_ISINF
):
2215 builtin_optab
= isinf_optab
; break;
2220 /* There's no easy way to detect the case we need to set EDOM. */
2221 if (flag_errno_math
&& errno_set
)
2224 /* Optab mode depends on the mode of the input argument. */
2225 mode
= TYPE_MODE (TREE_TYPE (arg
));
2227 icode
= builtin_optab
->handlers
[(int) mode
].insn_code
;
2229 /* Before working hard, check whether the instruction is available. */
2230 if (icode
!= CODE_FOR_nothing
)
2232 /* Make a suitable register to place result in. */
2234 || GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2235 target
= gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp
)));
2237 gcc_assert (insn_data
[icode
].operand
[0].predicate
2238 (target
, GET_MODE (target
)));
      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
2243 narg
= builtin_save_expr (arg
);
2247 exp
= build_call_expr (fndecl
, 1, arg
);
2250 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2252 if (mode
!= GET_MODE (op0
))
2253 op0
= convert_to_mode (mode
, op0
, 0);
2255 /* Compute into TARGET.
2256 Set TARGET to wherever the result comes back. */
2257 emit_unop_insn (icode
, target
, op0
, UNKNOWN
);
2261 target
= expand_call (exp
, target
, target
== const0_rtx
);
2266 /* Expand a call to the builtin sincos math function.
2267 Return NULL_RTX if a normal call should be emitted rather than expanding the
2268 function in-line. EXP is the expression that is a call to the builtin
2272 expand_builtin_sincos (tree exp
)
2274 rtx op0
, op1
, op2
, target1
, target2
;
2275 enum machine_mode mode
;
2276 tree arg
, sinp
, cosp
;
2279 if (!validate_arglist (exp
, REAL_TYPE
,
2280 POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2283 arg
= CALL_EXPR_ARG (exp
, 0);
2284 sinp
= CALL_EXPR_ARG (exp
, 1);
2285 cosp
= CALL_EXPR_ARG (exp
, 2);
2287 /* Make a suitable register to place result in. */
2288 mode
= TYPE_MODE (TREE_TYPE (arg
));
2290 /* Check if sincos insn is available, otherwise emit the call. */
2291 if (sincos_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
2294 target1
= gen_reg_rtx (mode
);
2295 target2
= gen_reg_rtx (mode
);
2297 op0
= expand_normal (arg
);
2298 op1
= expand_normal (build_fold_indirect_ref (sinp
));
2299 op2
= expand_normal (build_fold_indirect_ref (cosp
));
2301 /* Compute into target1 and target2.
2302 Set TARGET to wherever the result comes back. */
2303 result
= expand_twoval_unop (sincos_optab
, op0
, target2
, target1
, 0);
2304 gcc_assert (result
);
2306 /* Move target1 and target2 to the memory locations indicated
2308 emit_move_insn (op1
, target1
);
2309 emit_move_insn (op2
, target2
);
2314 /* Expand a call to the internal cexpi builtin to the sincos math function.
2315 EXP is the expression that is a call to the builtin function; if convenient,
2316 the result should be placed in TARGET. SUBTARGET may be used as the target
2317 for computing one of EXP's operands. */
2320 expand_builtin_cexpi (tree exp
, rtx target
, rtx subtarget
)
2322 tree fndecl
= get_callee_fndecl (exp
);
2324 enum machine_mode mode
;
2327 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2330 arg
= CALL_EXPR_ARG (exp
, 0);
2331 type
= TREE_TYPE (arg
);
2332 mode
= TYPE_MODE (TREE_TYPE (arg
));
  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
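  /* Background sketch (not from the original source): cexpi implements
     Euler's formula, cexpi (x) == cos (x) + i * sin (x).  In the
     sincos-based paths below the sine value ends up in op1 and the cosine
     in op2, and the final return statement recombines them with

       build2 (COMPLEX_EXPR, build_complex_type (type),
	       make_tree (TREE_TYPE (arg), op2),
	       make_tree (TREE_TYPE (arg), op1))

     so op2 supplies the real part and op1 the imaginary part; the cexp
     fallback instead returns the complex result of the libcall directly.  */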
2337 if (sincos_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2339 op1
= gen_reg_rtx (mode
);
2340 op2
= gen_reg_rtx (mode
);
2342 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2344 /* Compute into op1 and op2. */
2345 expand_twoval_unop (sincos_optab
, op0
, op2
, op1
, 0);
2347 else if (TARGET_HAS_SINCOS
)
2349 tree call
, fn
= NULL_TREE
;
2353 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2354 fn
= built_in_decls
[BUILT_IN_SINCOSF
];
2355 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2356 fn
= built_in_decls
[BUILT_IN_SINCOS
];
2357 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2358 fn
= built_in_decls
[BUILT_IN_SINCOSL
];
2362 op1
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2363 op2
= assign_temp (TREE_TYPE (arg
), 0, 1, 1);
2364 op1a
= copy_to_mode_reg (Pmode
, XEXP (op1
, 0));
2365 op2a
= copy_to_mode_reg (Pmode
, XEXP (op2
, 0));
2366 top1
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op1a
);
2367 top2
= make_tree (build_pointer_type (TREE_TYPE (arg
)), op2a
);
2369 /* Make sure not to fold the sincos call again. */
2370 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2371 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn
)),
2372 call
, 3, arg
, top1
, top2
));
2376 tree call
, fn
= NULL_TREE
, narg
;
2377 tree ctype
= build_complex_type (type
);
2379 /* We can expand via the C99 cexp function. */
2380 gcc_assert (TARGET_C99_FUNCTIONS
);
2382 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIF
)
2383 fn
= built_in_decls
[BUILT_IN_CEXPF
];
2384 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPI
)
2385 fn
= built_in_decls
[BUILT_IN_CEXP
];
2386 else if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_CEXPIL
)
2387 fn
= built_in_decls
[BUILT_IN_CEXPL
];
2390 narg
= fold_build2 (COMPLEX_EXPR
, ctype
,
2391 build_real (type
, dconst0
), arg
);
2393 /* Make sure not to fold the cexp call again. */
2394 call
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (fn
)), fn
);
2395 return expand_expr (build_call_nary (ctype
, call
, 1, arg
),
2396 target
, VOIDmode
, 0);
2399 /* Now build the proper return type. */
2400 return expand_expr (build2 (COMPLEX_EXPR
, build_complex_type (type
),
2401 make_tree (TREE_TYPE (arg
), op2
),
2402 make_tree (TREE_TYPE (arg
), op1
)),
2403 target
, VOIDmode
, 0);
2406 /* Expand a call to one of the builtin rounding functions gcc defines
2407 as an extension (lfloor and lceil). As these are gcc extensions we
2408 do not need to worry about setting errno to EDOM.
2409 If expanding via optab fails, lower expression to (int)(floor(x)).
2410 EXP is the expression that is a call to the builtin function;
2411 if convenient, the result should be placed in TARGET. SUBTARGET may
2412 be used as the target for computing one of EXP's operands. */
2415 expand_builtin_int_roundingfn (tree exp
, rtx target
, rtx subtarget
)
2417 convert_optab builtin_optab
;
2418 rtx op0
, insns
, tmp
;
2419 tree fndecl
= get_callee_fndecl (exp
);
2420 enum built_in_function fallback_fn
;
2421 tree fallback_fndecl
;
2422 enum machine_mode mode
;
2425 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2428 arg
= CALL_EXPR_ARG (exp
, 0);
2430 switch (DECL_FUNCTION_CODE (fndecl
))
2432 CASE_FLT_FN (BUILT_IN_LCEIL
):
2433 CASE_FLT_FN (BUILT_IN_LLCEIL
):
2434 builtin_optab
= lceil_optab
;
2435 fallback_fn
= BUILT_IN_CEIL
;
2438 CASE_FLT_FN (BUILT_IN_LFLOOR
):
2439 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
2440 builtin_optab
= lfloor_optab
;
2441 fallback_fn
= BUILT_IN_FLOOR
;
2448 /* Make a suitable register to place result in. */
2449 mode
= TYPE_MODE (TREE_TYPE (exp
));
2451 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2456 narg
= builtin_save_expr (arg
);
2460 exp
= build_call_expr (fndecl
, 1, arg
);
2463 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2467 /* Compute into TARGET. */
2468 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2470 /* Output the entire sequence. */
2471 insns
= get_insns ();
2477 /* If we were unable to expand via the builtin, stop the sequence
2478 (without outputting the insns). */
2481 /* Fall back to floating point rounding optab. */
2482 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2483 /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
2484 ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
2485 gcc_assert (fallback_fndecl
!= NULL_TREE
);
2486 exp
= build_call_expr (fallback_fndecl
, 1, arg
);
2488 tmp
= expand_normal (exp
);
2490 /* Truncate the result of floating point optab to integer
2491 via expand_fix (). */
2492 target
= gen_reg_rtx (mode
);
2493 expand_fix (target
, tmp
, 0);
2498 /* Expand a call to one of the builtin math functions doing integer
2500 Return 0 if a normal call should be emitted rather than expanding the
2501 function in-line. EXP is the expression that is a call to the builtin
2502 function; if convenient, the result should be placed in TARGET.
2503 SUBTARGET may be used as the target for computing one of EXP's operands. */
2506 expand_builtin_int_roundingfn_2 (tree exp
, rtx target
, rtx subtarget
)
2508 convert_optab builtin_optab
;
2510 tree fndecl
= get_callee_fndecl (exp
);
2512 enum machine_mode mode
;
2514 /* There's no easy way to detect the case we need to set EDOM. */
2515 if (flag_errno_math
)
2518 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
2521 arg
= CALL_EXPR_ARG (exp
, 0);
2523 switch (DECL_FUNCTION_CODE (fndecl
))
2525 CASE_FLT_FN (BUILT_IN_LRINT
):
2526 CASE_FLT_FN (BUILT_IN_LLRINT
):
2527 builtin_optab
= lrint_optab
; break;
2528 CASE_FLT_FN (BUILT_IN_LROUND
):
2529 CASE_FLT_FN (BUILT_IN_LLROUND
):
2530 builtin_optab
= lround_optab
; break;
2535 /* Make a suitable register to place result in. */
2536 mode
= TYPE_MODE (TREE_TYPE (exp
));
2538 target
= gen_reg_rtx (mode
);
  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
2543 narg
= builtin_save_expr (arg
);
2547 exp
= build_call_expr (fndecl
, 1, arg
);
2550 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2554 if (expand_sfix_optab (target
, op0
, builtin_optab
))
2556 /* Output the entire sequence. */
2557 insns
= get_insns ();
2563 /* If we were unable to expand via the builtin, stop the sequence
2564 (without outputting the insns) and call to the library function
2565 with the stabilized argument list. */
2568 target
= expand_call (exp
, target
, target
== const0_rtx
);
/* To evaluate powi(x,n), the floating point value x raised to the
   constant integer exponent n, we use a hybrid algorithm that
   combines the "window method" with look-up tables.  For an
   introduction to exponentiation algorithms and "addition chains",
   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */

/* Provide a default value for POWI_MAX_MULTS, the maximum number of
   multiplications to inline before calling the system library's pow
   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
   so this default never requires calling pow, powf or powl.  */

#ifndef POWI_MAX_MULTS
#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
#endif

/* The size of the "optimal power tree" lookup table.  All
   exponents less than this value are simply looked up in the
   powi_table below.  This threshold is also used to size the
   cache of pseudo registers that hold intermediate results.  */
#define POWI_TABLE_SIZE 256

/* The size, in bits, of the window used in the "window method"
   exponentiation algorithm.  This is equivalent to a radix of
   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
#define POWI_WINDOW_SIZE 3

/* The following table is an efficient representation of an
   "optimal power tree".  For each value, i, the corresponding
   value, j, in the table states that an optimal evaluation
   sequence for calculating pow(x,i) can be found by evaluating
   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
   100 integers is given in Knuth's "Seminumerical algorithms".  */
static const unsigned char powi_table[POWI_TABLE_SIZE] =
  {
      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
  };
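/* Illustrative sketch (not part of the original source): the same
   decomposition at the value level, computing x**n for a non-negative
   constant n the way expand_powi_1 below emits it as RTL.  Exponents
   below POWI_TABLE_SIZE are split via powi_table; larger odd exponents
   peel off the low POWI_WINDOW_SIZE bits, larger even exponents square.

     static double
     powi_value (double x, unsigned HOST_WIDE_INT n)
     {
       if (n == 0)
	 return 1.0;
       else if (n == 1)
	 return x;
       else if (n < POWI_TABLE_SIZE)
	 return powi_value (x, n - powi_table[n])
		* powi_value (x, powi_table[n]);
       else if (n & 1)
	 {
	   unsigned HOST_WIDE_INT digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
	   return powi_value (x, n - digit) * powi_value (x, digit);
	 }
       else
	 {
	   double half = powi_value (x, n >> 1);
	   return half * half;
	 }
     }

   For example, n = 13: powi_table[13] == 10, so x**13 is computed as
   x**10 * x**3, which in turn uses x**5 * x**5 and x**3 * x**2 -- five
   multiplications in total.  The real expander additionally caches each
   partial power in a pseudo register so it is only computed once.  */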
/* Return the number of multiplications required to calculate
   powi(x,n) where n is less than POWI_TABLE_SIZE.  This is a
   subroutine of powi_cost.  CACHE is an array indicating
   which exponents have already been calculated.  */

static int
powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
{
  /* If we've already calculated this exponent, then this evaluation
     doesn't require any additional multiplications.  */
  if (cache[n])
    return 0;

  cache[n] = true;
  return powi_lookup_cost (n - powi_table[n], cache)
	 + powi_lookup_cost (powi_table[n], cache) + 1;
}
/* Return the number of multiplications required to calculate
   powi(x,n) for an arbitrary x, given the exponent N.  This
   function needs to be kept in sync with expand_powi below.  */

static int
powi_cost (HOST_WIDE_INT n)
{
  bool cache[POWI_TABLE_SIZE];
  unsigned HOST_WIDE_INT digit;
  unsigned HOST_WIDE_INT val;
  int result;

  if (n == 0)
    return 0;

  /* Ignore the reciprocal when calculating the cost.  */
  val = (n < 0) ? -n : n;

  /* Initialize the exponent cache.  */
  memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
  cache[1] = true;

  result = 0;

  while (val >= POWI_TABLE_SIZE)
    {
      if (val & 1)
	{
	  digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
	  result += powi_lookup_cost (digit, cache)
		    + POWI_WINDOW_SIZE + 1;
	  val >>= POWI_WINDOW_SIZE;
	}
      else
	{
	  val >>= 1;
	  result++;
	}
    }

  return result + powi_lookup_cost (val, cache);
}
/* Recursive subroutine of expand_powi.  This function takes the array,
   CACHE, of already calculated exponents and an exponent N and returns
   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */

static rtx
expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
{
  unsigned HOST_WIDE_INT digit;
  rtx target, result;
  rtx op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      if (cache[n])
	return cache[n];

      target = gen_reg_rtx (mode);
      cache[n] = target;

      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
      op1 = expand_powi_1 (mode, powi_table[n], cache);
    }
  else if (n & 1)
    {
      target = gen_reg_rtx (mode);
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = expand_powi_1 (mode, n - digit, cache);
      op1 = expand_powi_1 (mode, digit, cache);
    }
  else
    {
      target = gen_reg_rtx (mode);
      op0 = expand_powi_1 (mode, n >> 1, cache);
      op1 = op0;
    }

  result = expand_mult (mode, op0, op1, target, 0);
  if (result != target)
    emit_move_insn (target, result);
  return target;
}
/* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
   floating point operand in mode MODE, and N is the exponent.  This
   function needs to be kept in sync with powi_cost above.  */

static rtx
expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
{
  unsigned HOST_WIDE_INT val;
  rtx cache[POWI_TABLE_SIZE];
  rtx result;

  if (n == 0)
    return CONST1_RTX (mode);

  val = (n < 0) ? -n : n;

  memset (cache, 0, sizeof (cache));
  cache[1] = x;

  result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);

  /* If the original exponent was negative, reciprocate the result.  */
  if (n < 0)
    result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
			   result, NULL_RTX, 0, OPTAB_LIB_WIDEN);

  return result;
}
2778 /* Expand a call to the pow built-in mathematical function. Return NULL_RTX if
2779 a normal call should be emitted rather than expanding the function
2780 in-line. EXP is the expression that is a call to the builtin
2781 function; if convenient, the result should be placed in TARGET. */
2784 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
2788 tree type
= TREE_TYPE (exp
);
2789 REAL_VALUE_TYPE cint
, c
, c2
;
2792 enum machine_mode mode
= TYPE_MODE (type
);
2794 if (! validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2797 arg0
= CALL_EXPR_ARG (exp
, 0);
2798 arg1
= CALL_EXPR_ARG (exp
, 1);
2800 if (TREE_CODE (arg1
) != REAL_CST
2801 || TREE_OVERFLOW (arg1
))
2802 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2804 /* Handle constant exponents. */
2806 /* For integer valued exponents we can expand to an optimal multiplication
2807 sequence using expand_powi. */
2808 c
= TREE_REAL_CST (arg1
);
2809 n
= real_to_integer (&c
);
2810 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2811 if (real_identical (&c
, &cint
)
2812 && ((n
>= -1 && n
<= 2)
2813 || (flag_unsafe_math_optimizations
2815 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2817 op
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
2820 op
= force_reg (mode
, op
);
2821 op
= expand_powi (op
, mode
, n
);
2826 narg0
= builtin_save_expr (arg0
);
  /* If the exponent is not integer valued, check if it is half of an integer.
     In this case we can expand to sqrt (x) * x**(n/2).  */
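  /* Worked example (not in the original source): for pow (x, 2.5) the code
     below finds c2 = 2 * 2.5 = 5, an odd integer, and so, under
     -funsafe-math-optimizations, expands the call as

       sqrt (x) * x * x	    (= x**0.5 * x**2 = x**2.5)

     using expand_powi for the x**(n/2) part; a negative exponent is handled
     by taking the reciprocal of that product afterwards.  */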
2830 fn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
2831 if (fn
!= NULL_TREE
)
2833 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst2
);
2834 n
= real_to_integer (&c2
);
2835 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2836 if (real_identical (&c2
, &cint
)
2837 && ((flag_unsafe_math_optimizations
2839 && powi_cost (n
/2) <= POWI_MAX_MULTS
)
2842 tree call_expr
= build_call_expr (fn
, 1, narg0
);
2843 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
2846 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, 0);
2847 op2
= force_reg (mode
, op2
);
2848 op2
= expand_powi (op2
, mode
, abs (n
/ 2));
2849 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2850 0, OPTAB_LIB_WIDEN
);
2851 /* If the original exponent was negative, reciprocate the
2854 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2855 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
  /* Check whether the exponent is one third of an integer.  In this case
     we can expand to x**(n/3) * cbrt(x)**(n%3).  As cbrt (x) is
     different from pow (x, 1./3.) due to rounding and behavior
     with negative x, we need to constrain this transformation to
     unsafe math and positive x or finite math.  */
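  /* Worked example (not in the original source): for pow (x, 4./3.) the
     code below finds c2 = 3 * (4/3) = 4, so n = 4, and, under
     -funsafe-math-optimizations, expands the call as

       cbrt (x) * x	    (= x**(1/3) * x**(4/3 truncated) = x**(4/3))

     i.e. one cbrt call plus an expand_powi sequence for x**(n/3); when
     abs (n) % 3 == 2 the cbrt value is squared first.  */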
2866 fn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
2868 && flag_unsafe_math_optimizations
2869 && (tree_expr_nonnegative_p (arg0
)
2870 || !HONOR_NANS (mode
)))
2872 real_arithmetic (&c2
, MULT_EXPR
, &c
, &dconst3
);
2873 real_round (&c2
, mode
, &c2
);
2874 n
= real_to_integer (&c2
);
2875 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2876 real_arithmetic (&c2
, RDIV_EXPR
, &cint
, &dconst3
);
2877 real_convert (&c2
, mode
, &c2
);
2878 if (real_identical (&c2
, &c
)
2880 && powi_cost (n
/3) <= POWI_MAX_MULTS
)
2883 tree call_expr
= build_call_expr (fn
, 1,narg0
);
2884 op
= expand_builtin (call_expr
, NULL_RTX
, subtarget
, mode
, 0);
2885 if (abs (n
) % 3 == 2)
2886 op
= expand_simple_binop (mode
, MULT
, op
, op
, op
,
2887 0, OPTAB_LIB_WIDEN
);
2890 op2
= expand_expr (narg0
, subtarget
, VOIDmode
, 0);
2891 op2
= force_reg (mode
, op2
);
2892 op2
= expand_powi (op2
, mode
, abs (n
/ 3));
2893 op
= expand_simple_binop (mode
, MULT
, op
, op2
, NULL_RTX
,
2894 0, OPTAB_LIB_WIDEN
);
2895 /* If the original exponent was negative, reciprocate the
2898 op
= expand_binop (mode
, sdiv_optab
, CONST1_RTX (mode
),
2899 op
, NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
2905 /* Fall back to optab expansion. */
2906 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2909 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2910 a normal call should be emitted rather than expanding the function
2911 in-line. EXP is the expression that is a call to the builtin
2912 function; if convenient, the result should be placed in TARGET. */
2915 expand_builtin_powi (tree exp
, rtx target
, rtx subtarget
)
2919 enum machine_mode mode
;
2920 enum machine_mode mode2
;
2922 if (! validate_arglist (exp
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2925 arg0
= CALL_EXPR_ARG (exp
, 0);
2926 arg1
= CALL_EXPR_ARG (exp
, 1);
2927 mode
= TYPE_MODE (TREE_TYPE (exp
));
2929 /* Handle constant power. */
2931 if (TREE_CODE (arg1
) == INTEGER_CST
2932 && !TREE_OVERFLOW (arg1
))
2934 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
2936 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2937 Otherwise, check the number of multiplications required. */
2938 if ((TREE_INT_CST_HIGH (arg1
) == 0
2939 || TREE_INT_CST_HIGH (arg1
) == -1)
2940 && ((n
>= -1 && n
<= 2)
2942 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2944 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
2945 op0
= force_reg (mode
, op0
);
2946 return expand_powi (op0
, mode
, n
);
2950 /* Emit a libcall to libgcc. */
2952 /* Mode of the 2nd argument must match that of an int. */
2953 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2955 if (target
== NULL_RTX
)
2956 target
= gen_reg_rtx (mode
);
2958 op0
= expand_expr (arg0
, subtarget
, mode
, 0);
2959 if (GET_MODE (op0
) != mode
)
2960 op0
= convert_to_mode (mode
, op0
, 0);
2961 op1
= expand_expr (arg1
, 0, mode2
, 0);
2962 if (GET_MODE (op1
) != mode2
)
2963 op1
= convert_to_mode (mode2
, op1
, 0);
2965 target
= emit_library_call_value (powi_optab
->handlers
[(int) mode
].libfunc
,
2966 target
, LCT_CONST_MAKE_BLOCK
, mode
, 2,
2967 op0
, mode
, op1
, mode2
);
/* Expand expression EXP, which is a call to the strlen builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
2977 expand_builtin_strlen (tree exp
, rtx target
,
2978 enum machine_mode target_mode
)
2980 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
2986 tree src
= CALL_EXPR_ARG (exp
, 0);
2987 rtx result
, src_reg
, char_rtx
, before_strlen
;
2988 enum machine_mode insn_mode
= target_mode
, char_mode
;
2989 enum insn_code icode
= CODE_FOR_nothing
;
2992 /* If the length can be computed at compile-time, return it. */
2993 len
= c_strlen (src
, 0);
2995 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2997 /* If the length can be computed at compile-time and is constant
2998 integer, but there are side-effects in src, evaluate
2999 src for side-effects, then return len.
3000 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3001 can be optimized into: i++; x = 3; */
3002 len
= c_strlen (src
, 1);
3003 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
3005 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3006 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
3009 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3011 /* If SRC is not a pointer type, don't do this operation inline. */
3015 /* Bail out if we can't compute strlen in the right mode. */
3016 while (insn_mode
!= VOIDmode
)
3018 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
3019 if (icode
!= CODE_FOR_nothing
)
3022 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
3024 if (insn_mode
== VOIDmode
)
3027 /* Make a place to write the result of the instruction. */
3031 && GET_MODE (result
) == insn_mode
3032 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3033 result
= gen_reg_rtx (insn_mode
);
3035 /* Make a place to hold the source address. We will not expand
3036 the actual source until we are sure that the expansion will
3037 not fail -- there are trees that cannot be expanded twice. */
3038 src_reg
= gen_reg_rtx (Pmode
);
3040 /* Mark the beginning of the strlen sequence so we can emit the
3041 source operand later. */
3042 before_strlen
= get_last_insn ();
3044 char_rtx
= const0_rtx
;
3045 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
3046 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
3048 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
3050 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
3051 char_rtx
, GEN_INT (align
));
3056 /* Now that we are assured of success, expand the source. */
3058 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
3060 emit_move_insn (src_reg
, pat
);
3065 emit_insn_after (pat
, before_strlen
);
3067 emit_insn_before (pat
, get_insns ());
3069 /* Return the value in the proper mode for this function. */
3070 if (GET_MODE (result
) == target_mode
)
3072 else if (target
!= 0)
3073 convert_move (target
, result
, 0);
3075 target
= convert_to_mode (target_mode
, result
, 0);
/* Expand a call to the strstr builtin.  Return NULL_RTX if we failed; the
   caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */
3086 expand_builtin_strstr (tree exp
, rtx target
, enum machine_mode mode
)
3088 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3090 tree type
= TREE_TYPE (exp
);
3091 tree result
= fold_builtin_strstr (CALL_EXPR_ARG (exp
, 0),
3092 CALL_EXPR_ARG (exp
, 1), type
);
3094 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* Expand a call to the strchr builtin.  Return NULL_RTX if we failed; the
   caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */
3104 expand_builtin_strchr (tree exp
, rtx target
, enum machine_mode mode
)
3106 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3108 tree type
= TREE_TYPE (exp
);
3109 tree result
= fold_builtin_strchr (CALL_EXPR_ARG (exp
, 0),
3110 CALL_EXPR_ARG (exp
, 1), type
);
3112 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3114 /* FIXME: Should use strchrM optab so that ports can optimize this. */
/* Expand a call to the strrchr builtin.  Return NULL_RTX if we failed; the
   caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */
3124 expand_builtin_strrchr (tree exp
, rtx target
, enum machine_mode mode
)
3126 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3128 tree type
= TREE_TYPE (exp
);
3129 tree result
= fold_builtin_strrchr (CALL_EXPR_ARG (exp
, 0),
3130 CALL_EXPR_ARG (exp
, 1), type
);
3132 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* Expand a call to the strpbrk builtin.  Return NULL_RTX if we failed; the
   caller should emit a normal call, otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */
3142 expand_builtin_strpbrk (tree exp
, rtx target
, enum machine_mode mode
)
3144 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3146 tree type
= TREE_TYPE (exp
);
3147 tree result
= fold_builtin_strpbrk (CALL_EXPR_ARG (exp
, 0),
3148 CALL_EXPR_ARG (exp
, 1), type
);
3150 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */
3160 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
3161 enum machine_mode mode
)
3163 const char *str
= (const char *) data
;
3165 gcc_assert (offset
>= 0
3166 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
3167 <= strlen (str
) + 1));
3169 return c_readstr (str
+ offset
, mode
);
/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
3178 expand_builtin_memcpy (tree exp
, rtx target
, enum machine_mode mode
)
3180 tree fndecl
= get_callee_fndecl (exp
);
3182 if (!validate_arglist (exp
,
3183 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3187 tree dest
= CALL_EXPR_ARG (exp
, 0);
3188 tree src
= CALL_EXPR_ARG (exp
, 1);
3189 tree len
= CALL_EXPR_ARG (exp
, 2);
3190 const char *src_str
;
3191 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3192 unsigned int dest_align
3193 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3194 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
3195 tree result
= fold_builtin_memory_op (dest
, src
, len
,
3196 TREE_TYPE (TREE_TYPE (fndecl
)),
3198 HOST_WIDE_INT expected_size
= -1;
3199 unsigned int expected_align
= 0;
3203 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3205 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3207 result
= TREE_OPERAND (result
, 1);
3209 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3212 /* If DEST is not a pointer type, call the normal function. */
3213 if (dest_align
== 0)
3216 /* If either SRC is not a pointer type, don't do this
3217 operation in-line. */
3221 stringop_block_profile (exp
, &expected_align
, &expected_size
);
3222 if (expected_align
< dest_align
)
3223 expected_align
= dest_align
;
3224 dest_mem
= get_memory_rtx (dest
, len
);
3225 set_mem_align (dest_mem
, dest_align
);
3226 len_rtx
= expand_normal (len
);
3227 src_str
= c_getstr (src
);
      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
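      /* Example (not in the original source): for a call like
	 memcpy (buf, "hi!", 4) with a sufficiently aligned BUF, the four
	 constant bytes can be emitted as one or two immediate stores via
	 store_by_pieces, so the string literal itself is never read at
	 run time.  */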
3233 && GET_CODE (len_rtx
) == CONST_INT
3234 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3235 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3236 (void *) src_str
, dest_align
))
3238 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3239 builtin_memcpy_read_str
,
3240 (void *) src_str
, dest_align
, 0);
3241 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3242 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3246 src_mem
= get_memory_rtx (src
, len
);
3247 set_mem_align (src_mem
, src_align
);
3249 /* Copy word part most expediently. */
3250 dest_addr
= emit_block_move_hints (dest_mem
, src_mem
, len_rtx
,
3251 CALL_EXPR_TAILCALL (exp
)
3252 ? BLOCK_OP_TAILCALL
: BLOCK_OP_NORMAL
,
3253 expected_align
, expected_size
);
3257 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3258 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3264 /* Expand a call EXP to the mempcpy builtin.
3265 Return NULL_RTX if we failed; the caller should emit a normal call,
3266 otherwise try to get the result in TARGET, if convenient (and in
3267 mode MODE if that's convenient). If ENDP is 0 return the
3268 destination pointer, if ENDP is 1 return the end pointer ala
3269 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3273 expand_builtin_mempcpy(tree exp
, rtx target
, enum machine_mode mode
)
3275 if (!validate_arglist (exp
,
3276 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3280 tree dest
= CALL_EXPR_ARG (exp
, 0);
3281 tree src
= CALL_EXPR_ARG (exp
, 1);
3282 tree len
= CALL_EXPR_ARG (exp
, 2);
3283 return expand_builtin_mempcpy_args (dest
, src
, len
,
3285 target
, mode
, /*endp=*/ 1);
3289 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3290 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3291 so that this can also be called without constructing an actual CALL_EXPR.
3292 TYPE is the return type of the call. The other arguments and return value
3293 are the same as for expand_builtin_mempcpy. */
3296 expand_builtin_mempcpy_args (tree dest
, tree src
, tree len
, tree type
,
3297 rtx target
, enum machine_mode mode
, int endp
)
3299 /* If return value is ignored, transform mempcpy into memcpy. */
3300 if (target
== const0_rtx
)
3302 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
3307 return expand_expr (build_call_expr (fn
, 3, dest
, src
, len
),
3308 target
, mode
, EXPAND_NORMAL
);
3312 const char *src_str
;
3313 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
3314 unsigned int dest_align
3315 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3316 rtx dest_mem
, src_mem
, len_rtx
;
3317 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, false, endp
);
3321 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3323 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3325 result
= TREE_OPERAND (result
, 1);
3327 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3330 /* If either SRC or DEST is not a pointer type, don't do this
3331 operation in-line. */
3332 if (dest_align
== 0 || src_align
== 0)
3335 /* If LEN is not constant, call the normal function. */
3336 if (! host_integerp (len
, 1))
3339 len_rtx
= expand_normal (len
);
3340 src_str
= c_getstr (src
);
      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only store the computed constants.  */
3346 && GET_CODE (len_rtx
) == CONST_INT
3347 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
3348 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
3349 (void *) src_str
, dest_align
))
3351 dest_mem
= get_memory_rtx (dest
, len
);
3352 set_mem_align (dest_mem
, dest_align
);
3353 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
3354 builtin_memcpy_read_str
,
3355 (void *) src_str
, dest_align
, endp
);
3356 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3357 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3361 if (GET_CODE (len_rtx
) == CONST_INT
3362 && can_move_by_pieces (INTVAL (len_rtx
),
3363 MIN (dest_align
, src_align
)))
3365 dest_mem
= get_memory_rtx (dest
, len
);
3366 set_mem_align (dest_mem
, dest_align
);
3367 src_mem
= get_memory_rtx (src
, len
);
3368 set_mem_align (src_mem
, src_align
);
3369 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
3370 MIN (dest_align
, src_align
), endp
);
3371 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3372 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Expand expression EXP, which is a call to the memmove builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3384 expand_builtin_memmove (tree exp
, rtx target
, enum machine_mode mode
, int ignore
)
3386 if (!validate_arglist (exp
,
3387 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3391 tree dest
= CALL_EXPR_ARG (exp
, 0);
3392 tree src
= CALL_EXPR_ARG (exp
, 1);
3393 tree len
= CALL_EXPR_ARG (exp
, 2);
3394 return expand_builtin_memmove_args (dest
, src
, len
, TREE_TYPE (exp
),
3395 target
, mode
, ignore
);
3399 /* Helper function to do the actual work for expand_builtin_memmove. The
3400 arguments to the builtin_memmove call DEST, SRC, and LEN are broken out
3401 so that this can also be called without constructing an actual CALL_EXPR.
3402 TYPE is the return type of the call. The other arguments and return value
3403 are the same as for expand_builtin_memmove. */
3406 expand_builtin_memmove_args (tree dest
, tree src
, tree len
,
3407 tree type
, rtx target
, enum machine_mode mode
,
3410 tree result
= fold_builtin_memory_op (dest
, src
, len
, type
, ignore
, /*endp=*/3);
3414 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3416 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3418 result
= TREE_OPERAND (result
, 1);
3420 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3423 /* Otherwise, call the normal function. */
/* Expand expression EXP, which is a call to the bcopy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3431 expand_builtin_bcopy (tree exp
, int ignore
)
3433 tree type
= TREE_TYPE (exp
);
3434 tree src
, dest
, size
;
3436 if (!validate_arglist (exp
,
3437 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3440 src
= CALL_EXPR_ARG (exp
, 0);
3441 dest
= CALL_EXPR_ARG (exp
, 1);
3442 size
= CALL_EXPR_ARG (exp
, 2);
3444 /* Transform bcopy(ptr x, ptr y, int z) to memmove(ptr y, ptr x, size_t z).
3445 This is done this way so that if it isn't expanded inline, we fall
3446 back to calling bcopy instead of memmove. */
3447 return expand_builtin_memmove_args (dest
, src
,
3448 fold_convert (sizetype
, size
),
3449 type
, const0_rtx
, VOIDmode
,
3454 # define HAVE_movstr 0
3455 # define CODE_FOR_movstr CODE_FOR_nothing
3458 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3459 we failed, the caller should emit a normal call, otherwise try to
3460 get the result in TARGET, if convenient. If ENDP is 0 return the
3461 destination pointer, if ENDP is 1 return the end pointer ala
3462 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3466 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3472 const struct insn_data
* data
;
3477 dest_mem
= get_memory_rtx (dest
, NULL
);
3478 src_mem
= get_memory_rtx (src
, NULL
);
3481 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3482 dest_mem
= replace_equiv_address (dest_mem
, target
);
3483 end
= gen_reg_rtx (Pmode
);
3487 if (target
== 0 || target
== const0_rtx
)
3489 end
= gen_reg_rtx (Pmode
);
3497 data
= insn_data
+ CODE_FOR_movstr
;
3499 if (data
->operand
[0].mode
!= VOIDmode
)
3500 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3502 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3508 /* movstr is supposed to set end to the address of the NUL
3509 terminator. If the caller requested a mempcpy-like return value,
3511 if (endp
== 1 && target
!= const0_rtx
)
3513 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3514 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
3526 expand_builtin_strcpy (tree fndecl
, tree exp
, rtx target
, enum machine_mode mode
)
3528 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3530 tree dest
= CALL_EXPR_ARG (exp
, 0);
3531 tree src
= CALL_EXPR_ARG (exp
, 1);
3532 return expand_builtin_strcpy_args (fndecl
, dest
, src
, target
, mode
);
3537 /* Helper function to do the actual work for expand_builtin_strcpy. The
3538 arguments to the builtin_strcpy call DEST and SRC are broken out
3539 so that this can also be called without constructing an actual CALL_EXPR.
3540 The other arguments and return value are the same as for
3541 expand_builtin_strcpy. */
3544 expand_builtin_strcpy_args (tree fndecl
, tree dest
, tree src
,
3545 rtx target
, enum machine_mode mode
)
3547 tree result
= fold_builtin_strcpy (fndecl
, dest
, src
, 0);
3549 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3550 return expand_movstr (dest
, src
, target
, /*endp=*/0);
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
3560 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3564 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3567 dst
= CALL_EXPR_ARG (exp
, 0);
3568 src
= CALL_EXPR_ARG (exp
, 1);
3570 /* If return value is ignored, transform stpcpy into strcpy. */
3571 if (target
== const0_rtx
)
3573 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3577 return expand_expr (build_call_expr (fn
, 2, dst
, src
),
3578 target
, mode
, EXPAND_NORMAL
);
3585 /* Ensure we get an actual string whose length can be evaluated at
3586 compile-time, not an expression containing a string. This is
3587 because the latter will potentially produce pessimized code
3588 when used to produce the return value. */
3589 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3590 return expand_movstr (dst
, src
, target
, /*endp=*/2);
3592 lenp1
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
3593 ret
= expand_builtin_mempcpy_args (dst
, src
, lenp1
, TREE_TYPE (exp
),
3594 target
, mode
, /*endp=*/2);
3599 if (TREE_CODE (len
) == INTEGER_CST
)
3601 rtx len_rtx
= expand_normal (len
);
3603 if (GET_CODE (len_rtx
) == CONST_INT
)
3605 ret
= expand_builtin_strcpy_args (get_callee_fndecl (exp
),
3606 dst
, src
, target
, mode
);
3612 if (mode
!= VOIDmode
)
3613 target
= gen_reg_rtx (mode
);
3615 target
= gen_reg_rtx (GET_MODE (ret
));
3617 if (GET_MODE (target
) != GET_MODE (ret
))
3618 ret
= gen_lowpart (GET_MODE (target
), ret
);
3620 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3621 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3629 return expand_movstr (dst
, src
, target
, /*endp=*/2);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */
3638 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3639 enum machine_mode mode
)
3641 const char *str
= (const char *) data
;
3643 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3646 return c_readstr (str
+ offset
, mode
);
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
3653 expand_builtin_strncpy (tree exp
, rtx target
, enum machine_mode mode
)
3655 tree fndecl
= get_callee_fndecl (exp
);
3657 if (validate_arglist (exp
,
3658 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3660 tree dest
= CALL_EXPR_ARG (exp
, 0);
3661 tree src
= CALL_EXPR_ARG (exp
, 1);
3662 tree len
= CALL_EXPR_ARG (exp
, 2);
3663 tree slen
= c_strlen (src
, 1);
3664 tree result
= fold_builtin_strncpy (fndecl
, dest
, src
, len
, slen
);
3668 while (TREE_CODE (result
) == COMPOUND_EXPR
)
3670 expand_expr (TREE_OPERAND (result
, 0), const0_rtx
, VOIDmode
,
3672 result
= TREE_OPERAND (result
, 1);
3674 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3677 /* We must be passed a constant len and src parameter. */
3678 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3681 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
3683 /* We're required to pad with trailing zeros if the requested
3684 len is greater than strlen(s2)+1. In that case try to
3685 use store_by_pieces, if it fails, punt. */
3686 if (tree_int_cst_lt (slen
, len
))
3688 unsigned int dest_align
3689 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3690 const char *p
= c_getstr (src
);
3693 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3694 || !can_store_by_pieces (tree_low_cst (len
, 1),
3695 builtin_strncpy_read_str
,
3696 (void *) p
, dest_align
))
3699 dest_mem
= get_memory_rtx (dest
, len
);
3700 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3701 builtin_strncpy_read_str
,
3702 (void *) p
, dest_align
, 0);
3703 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3704 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */
3716 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3717 enum machine_mode mode
)
3719 const char *c
= (const char *) data
;
3720 char *p
= alloca (GET_MODE_SIZE (mode
));
3722 memset (p
, *c
, GET_MODE_SIZE (mode
));
3724 return c_readstr (p
, mode
);
/* Callback routine for store_by_pieces.  Return the RTL of a register
   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
   char value given in the RTL register data.  For example, if mode is
   4 bytes wide, return the RTL for 0x01010101*data.  */
3733 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3734 enum machine_mode mode
)
3740 size
= GET_MODE_SIZE (mode
);
3745 memset (p
, 1, size
);
3746 coeff
= c_readstr (p
, mode
);
3748 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3749 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3750 return force_reg (mode
, target
);
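
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the coefficient trick builtin_memset_gen_str
   sets up in RTL, shown on plain host integers for a hypothetical 4-byte
   mode.  The helper name is hypothetical.  */
#if 0
static unsigned int
splat_byte_example (unsigned char c)
{
  /* c_readstr on "\1\1\1\1" yields 0x01010101; multiplying by the fill
     byte replicates it into every byte of the word, e.g. 0xAB becomes
     0xABABABAB.  */
  return 0x01010101u * c;
}
#endif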
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */

expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
  if (!validate_arglist (exp,
                         POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree val = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  return expand_builtin_memset_args (dest, val, len, target, mode, exp);
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

expand_builtin_memset_args (tree dest, tree val, tree len,
                            rtx target, enum machine_mode mode, tree orig_exp)
  enum built_in_function fcode;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;

  dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)

  stringop_block_profile (orig_exp, &expected_align, &expected_size);
  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  dest_mem = get_memory_rtx (dest, len);

  if (TREE_CODE (val) != INTEGER_CST)
      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
                                 val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */

      if (host_integerp (len, 1)
          && !(optimize_size && tree_low_cst (len, 1) > 1)
          && can_store_by_pieces (tree_low_cst (len, 1),
                                  builtin_memset_read_str, &c, dest_align))
          val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
                               val_rtx);
          store_by_pieces (dest_mem, tree_low_cst (len, 1),
                           builtin_memset_gen_str, val_rtx, dest_align, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
                                        dest_align, expected_align,
                                        expected_size))

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);

  if (target_char_cast (val, &c))

  if (host_integerp (len, 1)
      && !(optimize_size && tree_low_cst (len, 1) > 1)
      && can_store_by_pieces (tree_low_cst (len, 1),
                              builtin_memset_read_str, &c, dest_align))
    store_by_pieces (dest_mem, tree_low_cst (len, 1),
                     builtin_memset_read_str, &c, dest_align, 0);
  else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
                                    dest_align, expected_align,
                                    expected_size))

  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
  dest_mem = convert_memory_address (ptr_mode, dest_mem);

  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
                                   CALL_EXPR_TAILCALL (orig_exp)
                                   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
                                   expected_align, expected_size);

  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
  dest_addr = convert_memory_address (ptr_mode, dest_addr);

  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_expr (fndecl, 3, dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_expr (fndecl, 2, dest, len);

  if (TREE_CODE (fn) == CALL_EXPR)
    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
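
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): what the "memset by pieces" case above amounts
   to at the source level for a small constant length and constant fill
   byte.  Standalone, standard C; the helper name is hypothetical.  */
#if 0
#include <stdint.h>
#include <string.h>

static void
memset_by_pieces_example (unsigned char *p)
{
  /* memset (p, 0xAB, 8) with a known length can become two word-sized
     stores of the splatted fill value instead of a library call.  */
  uint32_t word = 0xABABABABu;
  memcpy (p, &word, 4);
  memcpy (p + 4, &word, 4);
}
#endif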
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */

expand_builtin_bzero (tree exp)
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))

  dest = CALL_EXPR_ARG (exp, 0);
  size = CALL_EXPR_ARG (exp, 1);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return expand_builtin_memset_args (dest, integer_zero_node,
                                     fold_convert (sizetype, size),
                                     const0_rtx, VOIDmode, exp);
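
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the source-level equivalent of the argument
   rewrite above; bzero's two arguments become memset's three, with a zero
   fill value and the size widened to size_t.  The helper name is
   hypothetical.  */
#if 0
#include <string.h>

static void
bzero_as_memset_example (void *ptr, unsigned int size)
{
  memset (ptr, 0, (size_t) size);	/* bzero (ptr, size) */
}
#endif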
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

expand_builtin_memcmp (tree exp, rtx target, enum machine_mode mode)
3930 if (!validate_arglist (exp
,
3931 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3935 tree result
= fold_builtin_memcmp (CALL_EXPR_ARG (exp
, 0),
3936 CALL_EXPR_ARG (exp
, 1),
3937 CALL_EXPR_ARG (exp
, 2));
3939 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3942 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3944 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3947 tree arg1
= CALL_EXPR_ARG (exp
, 0);
3948 tree arg2
= CALL_EXPR_ARG (exp
, 1);
3949 tree len
= CALL_EXPR_ARG (exp
, 2);
3952 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3954 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3955 enum machine_mode insn_mode
;
3957 #ifdef HAVE_cmpmemsi
3959 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3962 #ifdef HAVE_cmpstrnsi
3964 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
3969 /* If we don't have POINTER_TYPE, call the function. */
3970 if (arg1_align
== 0 || arg2_align
== 0)
3973 /* Make a place to write the result of the instruction. */
3976 && REG_P (result
) && GET_MODE (result
) == insn_mode
3977 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3978 result
= gen_reg_rtx (insn_mode
);
3980 arg1_rtx
= get_memory_rtx (arg1
, len
);
3981 arg2_rtx
= get_memory_rtx (arg2
, len
);
3982 arg3_rtx
= expand_normal (len
);
3984 /* Set MEM_SIZE as appropriate. */
3985 if (GET_CODE (arg3_rtx
) == CONST_INT
)
3987 set_mem_size (arg1_rtx
, arg3_rtx
);
3988 set_mem_size (arg2_rtx
, arg3_rtx
);
3991 #ifdef HAVE_cmpmemsi
3993 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3994 GEN_INT (MIN (arg1_align
, arg2_align
)));
3997 #ifdef HAVE_cmpstrnsi
3999 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4000 GEN_INT (MIN (arg1_align
, arg2_align
)));
4008 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE_MAKE_BLOCK
,
4009 TYPE_MODE (integer_type_node
), 3,
4010 XEXP (arg1_rtx
, 0), Pmode
,
4011 XEXP (arg2_rtx
, 0), Pmode
,
4012 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
4013 TYPE_UNSIGNED (sizetype
)),
4014 TYPE_MODE (sizetype
));
4016 /* Return the value in the proper mode for this function. */
4017 mode
= TYPE_MODE (TREE_TYPE (exp
));
4018 if (GET_MODE (result
) == mode
)
4020 else if (target
!= 0)
4022 convert_move (target
, result
, 0);
4026 return convert_to_mode (mode
, result
, 0);
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
4040 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4044 tree result
= fold_builtin_strcmp (CALL_EXPR_ARG (exp
, 0),
4045 CALL_EXPR_ARG (exp
, 1));
4047 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4050 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
4051 if (cmpstr_optab
[SImode
] != CODE_FOR_nothing
4052 || cmpstrn_optab
[SImode
] != CODE_FOR_nothing
)
4054 rtx arg1_rtx
, arg2_rtx
;
4055 rtx result
, insn
= NULL_RTX
;
4057 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4058 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4061 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4063 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4065 /* If we don't have POINTER_TYPE, call the function. */
4066 if (arg1_align
== 0 || arg2_align
== 0)
4069 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4070 arg1
= builtin_save_expr (arg1
);
4071 arg2
= builtin_save_expr (arg2
);
4073 arg1_rtx
= get_memory_rtx (arg1
, NULL
);
4074 arg2_rtx
= get_memory_rtx (arg2
, NULL
);
4076 #ifdef HAVE_cmpstrsi
4077 /* Try to call cmpstrsi. */
4080 enum machine_mode insn_mode
4081 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
4083 /* Make a place to write the result of the instruction. */
4086 && REG_P (result
) && GET_MODE (result
) == insn_mode
4087 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4088 result
= gen_reg_rtx (insn_mode
);
4090 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
,
4091 GEN_INT (MIN (arg1_align
, arg2_align
)));
4094 #ifdef HAVE_cmpstrnsi
4095 /* Try to determine at least one length and call cmpstrnsi. */
4096 if (!insn
&& HAVE_cmpstrnsi
)
4101 enum machine_mode insn_mode
4102 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4103 tree len1
= c_strlen (arg1
, 1);
4104 tree len2
= c_strlen (arg2
, 1);
4107 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4109 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant lengths,
         use the shorter.  */
4122 else if (TREE_SIDE_EFFECTS (len1
))
4124 else if (TREE_SIDE_EFFECTS (len2
))
4126 else if (TREE_CODE (len1
) != INTEGER_CST
)
4128 else if (TREE_CODE (len2
) != INTEGER_CST
)
4130 else if (tree_int_cst_lt (len1
, len2
))
4135 /* If both arguments have side effects, we cannot optimize. */
4136 if (!len
|| TREE_SIDE_EFFECTS (len
))
4139 arg3_rtx
= expand_normal (len
);
4141 /* Make a place to write the result of the instruction. */
4144 && REG_P (result
) && GET_MODE (result
) == insn_mode
4145 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4146 result
= gen_reg_rtx (insn_mode
);
4148 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4149 GEN_INT (MIN (arg1_align
, arg2_align
)));
4157 /* Return the value in the proper mode for this function. */
4158 mode
= TYPE_MODE (TREE_TYPE (exp
));
4159 if (GET_MODE (result
) == mode
)
4162 return convert_to_mode (mode
, result
, 0);
4163 convert_move (target
, result
, 0);
4167 /* Expand the library call ourselves using a stabilized argument
4168 list to avoid re-evaluating the function's arguments twice. */
4169 #ifdef HAVE_cmpstrnsi
4172 fndecl
= get_callee_fndecl (exp
);
4173 fn
= build_call_expr (fndecl
, 2, arg1
, arg2
);
4174 if (TREE_CODE (fn
) == CALL_EXPR
)
4175 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4176 return expand_call (fn
, target
, target
== const0_rtx
);
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient.  */

expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
4189 if (!validate_arglist (exp
,
4190 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4194 tree result
= fold_builtin_strncmp (CALL_EXPR_ARG (exp
, 0),
4195 CALL_EXPR_ARG (exp
, 1),
4196 CALL_EXPR_ARG (exp
, 2));
4198 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4201 /* If c_strlen can determine an expression for one of the string
4202 lengths, and it doesn't have side effects, then emit cmpstrnsi
4203 using length MIN(strlen(string)+1, arg3). */
4204 #ifdef HAVE_cmpstrnsi
4207 tree len
, len1
, len2
;
4208 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
4211 tree arg1
= CALL_EXPR_ARG (exp
, 0);
4212 tree arg2
= CALL_EXPR_ARG (exp
, 1);
4213 tree arg3
= CALL_EXPR_ARG (exp
, 2);
4216 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4218 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
4219 enum machine_mode insn_mode
4220 = insn_data
[(int) CODE_FOR_cmpstrnsi
].operand
[0].mode
;
4222 len1
= c_strlen (arg1
, 1);
4223 len2
= c_strlen (arg2
, 1);
4226 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
4228 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
      /* If we don't have a constant length for the first, use the length
         of the second, if we know it.  We don't require a constant for
         this case; some cost analysis could be done if both are available
         but neither is constant.  For now, assume they're equally cheap,
         unless one has side effects.  If both strings have constant lengths,
         use the shorter.  */
4241 else if (TREE_SIDE_EFFECTS (len1
))
4243 else if (TREE_SIDE_EFFECTS (len2
))
4245 else if (TREE_CODE (len1
) != INTEGER_CST
)
4247 else if (TREE_CODE (len2
) != INTEGER_CST
)
4249 else if (tree_int_cst_lt (len1
, len2
))
4254 /* If both arguments have side effects, we cannot optimize. */
4255 if (!len
|| TREE_SIDE_EFFECTS (len
))
4258 /* The actual new length parameter is MIN(len,arg3). */
4259 len
= fold_build2 (MIN_EXPR
, TREE_TYPE (len
), len
,
4260 fold_convert (TREE_TYPE (len
), arg3
));
4262 /* If we don't have POINTER_TYPE, call the function. */
4263 if (arg1_align
== 0 || arg2_align
== 0)
4266 /* Make a place to write the result of the instruction. */
4269 && REG_P (result
) && GET_MODE (result
) == insn_mode
4270 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
4271 result
= gen_reg_rtx (insn_mode
);
4273 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4274 arg1
= builtin_save_expr (arg1
);
4275 arg2
= builtin_save_expr (arg2
);
4276 len
= builtin_save_expr (len
);
4278 arg1_rtx
= get_memory_rtx (arg1
, len
);
4279 arg2_rtx
= get_memory_rtx (arg2
, len
);
4280 arg3_rtx
= expand_normal (len
);
4281 insn
= gen_cmpstrnsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
4282 GEN_INT (MIN (arg1_align
, arg2_align
)));
4287 /* Return the value in the proper mode for this function. */
4288 mode
= TYPE_MODE (TREE_TYPE (exp
));
4289 if (GET_MODE (result
) == mode
)
4292 return convert_to_mode (mode
, result
, 0);
4293 convert_move (target
, result
, 0);
4297 /* Expand the library call ourselves using a stabilized argument
4298 list to avoid re-evaluating the function's arguments twice. */
4299 fndecl
= get_callee_fndecl (exp
);
4300 fn
= build_call_expr (fndecl
, 3, arg1
, arg2
, len
);
4301 if (TREE_CODE (fn
) == CALL_EXPR
)
4302 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
4303 return expand_call (fn
, target
, target
== const0_rtx
);
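
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the length fed to cmpstrnsi above.  Because
   strncmp stops at a NUL, comparing past the end of an operand with a
   known string length cannot change the result, so MIN (strlen (s) + 1, n)
   bytes suffice.  Standalone, standard C; the name is hypothetical.  */
#if 0
#include <string.h>

static size_t
strncmp_effective_length_example (const char *known, size_t n)
{
  size_t len1 = strlen (known) + 1;	/* include the terminating NUL */
  return len1 < n ? len1 : n;		/* MIN (strlen (s) + 1, n) */
}
#endif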
/* Expand expression EXP, which is a call to the strcat builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

expand_builtin_strcat (tree fndecl, tree exp, rtx target, enum machine_mode mode)
4316 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4320 tree dst
= CALL_EXPR_ARG (exp
, 0);
4321 tree src
= CALL_EXPR_ARG (exp
, 1);
4322 const char *p
= c_getstr (src
);
4324 /* If the string length is zero, return the dst parameter. */
4325 if (p
&& *p
== '\0')
4326 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
4330 /* See if we can store by pieces into (dst + strlen(dst)). */
4331 tree newsrc
, newdst
,
4332 strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
4335 /* Stabilize the argument list. */
4336 newsrc
= builtin_save_expr (src
);
4337 dst
= builtin_save_expr (dst
);
4341 /* Create strlen (dst). */
4342 newdst
= build_call_expr (strlen_fn
, 1, dst
);
4343 /* Create (dst + (cast) strlen (dst)). */
4344 newdst
= fold_convert (TREE_TYPE (dst
), newdst
);
4345 newdst
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dst
), dst
, newdst
);
4347 newdst
= builtin_save_expr (newdst
);
4349 if (!expand_builtin_strcpy_args (fndecl
, newdst
, newsrc
, target
, mode
))
4351 end_sequence (); /* Stop sequence. */
4355 /* Output the entire sequence. */
4356 insns
= get_insns ();
4360 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
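
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the source-level shape of the rewrite built
   above when SRC is a known nonempty constant; strcat becomes a strcpy
   into dst + strlen (dst).  The helper name is hypothetical.  */
#if 0
#include <string.h>

static char *
strcat_as_strcpy_example (char *dst, const char *src)
{
  strcpy (dst + strlen (dst), src);
  return dst;
}
#endif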
/* Expand expression EXP, which is a call to the strncat builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

expand_builtin_strncat (tree exp, rtx target, enum machine_mode mode)
4374 if (validate_arglist (exp
,
4375 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
4377 tree result
= fold_builtin_strncat (CALL_EXPR_ARG (exp
, 0),
4378 CALL_EXPR_ARG (exp
, 1),
4379 CALL_EXPR_ARG (exp
, 2));
4381 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* Expand expression EXP, which is a call to the strspn builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

expand_builtin_strspn (tree exp, rtx target, enum machine_mode mode)
4393 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4395 tree result
= fold_builtin_strspn (CALL_EXPR_ARG (exp
, 0),
4396 CALL_EXPR_ARG (exp
, 1));
4398 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
/* Expand expression EXP, which is a call to the strcspn builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

expand_builtin_strcspn (tree exp, rtx target, enum machine_mode mode)
4410 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4412 tree result
= fold_builtin_strcspn (CALL_EXPR_ARG (exp
, 0),
4413 CALL_EXPR_ARG (exp
, 1));
4415 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
4420 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4421 if that's convenient. */
4424 expand_builtin_saveregs (void)
4428 /* Don't do __builtin_saveregs more than once in a function.
4429 Save the result of the first call and reuse it. */
4430 if (saveregs_value
!= 0)
4431 return saveregs_value
;
4433 /* When this function is called, it means that registers must be
4434 saved on entry to this function. So we migrate the call to the
4435 first insn of this function. */
4439 /* Do whatever the machine needs done in this case. */
4440 val
= targetm
.calls
.expand_builtin_saveregs ();
4445 saveregs_value
= val
;
4447 /* Put the insns after the NOTE that starts the function. If this
4448 is inside a start_sequence, make the outer-level insn chain current, so
4449 the code is placed at the start of the function. */
4450 push_topmost_sequence ();
4451 emit_insn_after (seq
, entry_of_function ());
4452 pop_topmost_sequence ();
4457 /* __builtin_args_info (N) returns word N of the arg space info
4458 for the current function. The number and meanings of words
4459 is controlled by the definition of CUMULATIVE_ARGS. */
4462 expand_builtin_args_info (tree exp
)
4464 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
4465 int *word_ptr
= (int *) ¤t_function_args_info
;
4467 gcc_assert (sizeof (CUMULATIVE_ARGS
) % sizeof (int) == 0);
4469 if (call_expr_nargs (exp
) != 0)
4471 if (!host_integerp (CALL_EXPR_ARG (exp
, 0), 0))
4472 error ("argument of %<__builtin_args_info%> must be constant");
4475 HOST_WIDE_INT wordnum
= tree_low_cst (CALL_EXPR_ARG (exp
, 0), 0);
4477 if (wordnum
< 0 || wordnum
>= nwords
)
4478 error ("argument of %<__builtin_args_info%> out of range");
4480 return GEN_INT (word_ptr
[wordnum
]);
4484 error ("missing argument in %<__builtin_args_info%>");
4489 /* Expand a call to __builtin_next_arg. */
4492 expand_builtin_next_arg (void)
4494 /* Checking arguments is already done in fold_builtin_next_arg
4495 that must be called before this function. */
4496 return expand_binop (Pmode
, add_optab
,
4497 current_function_internal_arg_pointer
,
4498 current_function_arg_offset_rtx
,
4499 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
4502 /* Make it easier for the backends by protecting the valist argument
4503 from multiple evaluations. */
4506 stabilize_va_list (tree valist
, int needs_lvalue
)
4508 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4510 if (TREE_SIDE_EFFECTS (valist
))
4511 valist
= save_expr (valist
);
4513 /* For this case, the backends will be expecting a pointer to
4514 TREE_TYPE (va_list_type_node), but it's possible we've
4515 actually been given an array (an actual va_list_type_node).
4517 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4519 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4520 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4529 if (! TREE_SIDE_EFFECTS (valist
))
4532 pt
= build_pointer_type (va_list_type_node
);
4533 valist
= fold_build1 (ADDR_EXPR
, pt
, valist
);
4534 TREE_SIDE_EFFECTS (valist
) = 1;
4537 if (TREE_SIDE_EFFECTS (valist
))
4538 valist
= save_expr (valist
);
4539 valist
= build_fold_indirect_ref (valist
);
4545 /* The "standard" definition of va_list is void*. */
4548 std_build_builtin_va_list (void)
4550 return ptr_type_node
;
4553 /* The "standard" implementation of va_start: just assign `nextarg' to
4557 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4561 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
4562 make_tree (ptr_type_node
, nextarg
));
4563 TREE_SIDE_EFFECTS (t
) = 1;
4565 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4568 /* Expand EXP, a call to __builtin_va_start. */
4571 expand_builtin_va_start (tree exp
)
4576 if (call_expr_nargs (exp
) < 2)
4578 error ("too few arguments to function %<va_start%>");
4582 if (fold_builtin_next_arg (exp
, true))
4585 nextarg
= expand_builtin_next_arg ();
4586 valist
= stabilize_va_list (CALL_EXPR_ARG (exp
, 0), 1);
4588 #ifdef EXPAND_BUILTIN_VA_START
4589 EXPAND_BUILTIN_VA_START (valist
, nextarg
);
4591 std_expand_builtin_va_start (valist
, nextarg
);
4597 /* The "standard" implementation of va_arg: read the value from the
4598 current (padded) address and increment by the (padded) size. */
4601 std_gimplify_va_arg_expr (tree valist
, tree type
, tree
*pre_p
, tree
*post_p
)
4603 tree addr
, t
, type_size
, rounded_size
, valist_tmp
;
4604 unsigned HOST_WIDE_INT align
, boundary
;
4607 #ifdef ARGS_GROW_DOWNWARD
4608 /* All of the alignment and movement below is for args-grow-up machines.
4609 As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4610 implement their own specialized gimplify_va_arg_expr routines. */
4614 indirect
= pass_by_reference (NULL
, TYPE_MODE (type
), type
, false);
4616 type
= build_pointer_type (type
);
4618 align
= PARM_BOUNDARY
/ BITS_PER_UNIT
;
4619 boundary
= FUNCTION_ARG_BOUNDARY (TYPE_MODE (type
), type
) / BITS_PER_UNIT
;
4621 /* Hoist the valist value into a temporary for the moment. */
4622 valist_tmp
= get_initialized_tmp_var (valist
, pre_p
, NULL
);
4624 /* va_list pointer is aligned to PARM_BOUNDARY. If argument actually
4625 requires greater alignment, we must perform dynamic alignment. */
4626 if (boundary
> align
4627 && !integer_zerop (TYPE_SIZE (type
)))
4629 t
= fold_convert (TREE_TYPE (valist
), size_int (boundary
- 1));
4630 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4631 build2 (PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
));
4632 gimplify_and_add (t
, pre_p
);
4634 t
= fold_convert (TREE_TYPE (valist
), size_int (-boundary
));
4635 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist_tmp
,
4636 build2 (BIT_AND_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
));
4637 gimplify_and_add (t
, pre_p
);
  /* If the actual alignment is less than the alignment of the type,
     adjust the type accordingly so that we don't assume strict alignment
     when dereferencing the pointer.  */
  boundary *= BITS_PER_UNIT;
4646 if (boundary
< TYPE_ALIGN (type
))
4648 type
= build_variant_type_copy (type
);
4649 TYPE_ALIGN (type
) = boundary
;
4652 /* Compute the rounded size of the type. */
4653 type_size
= size_in_bytes (type
);
4654 rounded_size
= round_up (type_size
, align
);
4656 /* Reduce rounded_size so it's sharable with the postqueue. */
4657 gimplify_expr (&rounded_size
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4661 if (PAD_VARARGS_DOWN
&& !integer_zerop (rounded_size
))
4663 /* Small args are padded downward. */
4664 t
= fold_build2 (GT_EXPR
, sizetype
, rounded_size
, size_int (align
));
4665 t
= fold_build3 (COND_EXPR
, sizetype
, t
, size_zero_node
,
4666 size_binop (MINUS_EXPR
, rounded_size
, type_size
));
4667 t
= fold_convert (TREE_TYPE (addr
), t
);
4668 addr
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr
), addr
, t
);
4671 /* Compute new value for AP. */
4672 t
= fold_convert (TREE_TYPE (valist
), rounded_size
);
4673 t
= build2 (PLUS_EXPR
, TREE_TYPE (valist
), valist_tmp
, t
);
4674 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
, t
);
4675 gimplify_and_add (t
, pre_p
);
4677 addr
= fold_convert (build_pointer_type (type
), addr
);
4680 addr
= build_va_arg_indirect_ref (addr
);
4682 return build_va_arg_indirect_ref (addr
);
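
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the pointer arithmetic the gimplification
   above produces for an args-grow-up target, shown on a plain char *
   va_list.  The PAD_VARARGS_DOWN adjustment for small arguments and the
   pass-by-reference case are omitted, and the helper name is
   hypothetical.  */
#if 0
#include <stddef.h>
#include <stdint.h>

static void *
va_arg_step_example (char **ap, size_t size, size_t align, size_t boundary)
{
  char *p = *ap;

  /* Dynamic over-alignment when the argument needs more than PARM_BOUNDARY:
     p = (p + boundary - 1) & -boundary.  */
  if (boundary > align)
    p = (char *) (((uintptr_t) p + boundary - 1) & ~((uintptr_t) boundary - 1));

  /* Round the argument size up to the alignment and advance AP past it.  */
  size_t rounded = (size + align - 1) & ~(align - 1);
  *ap = p + rounded;

  return p;			/* address from which the argument is read */
}
#endif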
4685 /* Build an indirect-ref expression over the given TREE, which represents a
4686 piece of a va_arg() expansion. */
4688 build_va_arg_indirect_ref (tree addr
)
4690 addr
= build_fold_indirect_ref (addr
);
4692 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

dummy_object (tree type)
4704 tree t
= build_int_cst (build_pointer_type (type
), 0);
4705 return build1 (INDIRECT_REF
, type
, t
);
4708 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4709 builtin function, but a very special sort of operator. */
4711 enum gimplify_status
4712 gimplify_va_arg_expr (tree
*expr_p
, tree
*pre_p
, tree
*post_p
)
4714 tree promoted_type
, want_va_type
, have_va_type
;
4715 tree valist
= TREE_OPERAND (*expr_p
, 0);
4716 tree type
= TREE_TYPE (*expr_p
);
4719 /* Verify that valist is of the proper type. */
4720 want_va_type
= va_list_type_node
;
4721 have_va_type
= TREE_TYPE (valist
);
4723 if (have_va_type
== error_mark_node
)
4726 if (TREE_CODE (want_va_type
) == ARRAY_TYPE
)
4728 /* If va_list is an array type, the argument may have decayed
4729 to a pointer type, e.g. by being passed to another function.
4730 In that case, unwrap both types so that we can compare the
4731 underlying records. */
4732 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
4733 || POINTER_TYPE_P (have_va_type
))
4735 want_va_type
= TREE_TYPE (want_va_type
);
4736 have_va_type
= TREE_TYPE (have_va_type
);
4740 if (TYPE_MAIN_VARIANT (want_va_type
) != TYPE_MAIN_VARIANT (have_va_type
))
4742 error ("first argument to %<va_arg%> not of type %<va_list%>");
4746 /* Generate a diagnostic for requesting data of a type that cannot
4747 be passed through `...' due to type promotion at the call site. */
4748 else if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4751 static bool gave_help
;
4753 /* Unfortunately, this is merely undefined, rather than a constraint
4754 violation, so we cannot make this an error. If this call is never
4755 executed, the program is still strictly conforming. */
4756 warning (0, "%qT is promoted to %qT when passed through %<...%>",
4757 type
, promoted_type
);
4761 warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4762 promoted_type
, type
);
4765 /* We can, however, treat "undefined" any way we please.
4766 Call abort to encourage the user to fix the program. */
4767 inform ("if this code is reached, the program will abort");
4768 t
= build_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
], 0);
4769 append_to_statement_list (t
, pre_p
);
4771 /* This is dead code, but go ahead and finish so that the
4772 mode of the result comes out right. */
4773 *expr_p
= dummy_object (type
);
4778 /* Make it easier for the backends by protecting the valist argument
4779 from multiple evaluations. */
4780 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4782 /* For this case, the backends will be expecting a pointer to
4783 TREE_TYPE (va_list_type_node), but it's possible we've
4784 actually been given an array (an actual va_list_type_node).
4786 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4788 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4789 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4791 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4794 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4796 if (!targetm
.gimplify_va_arg_expr
)
4797 /* FIXME:Once most targets are converted we should merely
4798 assert this is non-null. */
4801 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
4806 /* Expand EXP, a call to __builtin_va_end. */
4809 expand_builtin_va_end (tree exp
)
4811 tree valist
= CALL_EXPR_ARG (exp
, 0);
  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
4816 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4821 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4822 builtin rather than just as an assignment in stdarg.h because of the
4823 nastiness of array-type va_list types. */
4826 expand_builtin_va_copy (tree exp
)
4830 dst
= CALL_EXPR_ARG (exp
, 0);
4831 src
= CALL_EXPR_ARG (exp
, 1);
4833 dst
= stabilize_va_list (dst
, 1);
4834 src
= stabilize_va_list (src
, 0);
4836 if (TREE_CODE (va_list_type_node
) != ARRAY_TYPE
)
4838 t
= build2 (MODIFY_EXPR
, va_list_type_node
, dst
, src
);
4839 TREE_SIDE_EFFECTS (t
) = 1;
4840 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4844 rtx dstb
, srcb
, size
;
4846 /* Evaluate to pointers. */
4847 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4848 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4849 size
= expand_expr (TYPE_SIZE_UNIT (va_list_type_node
), NULL_RTX
,
4850 VOIDmode
, EXPAND_NORMAL
);
4852 dstb
= convert_memory_address (Pmode
, dstb
);
4853 srcb
= convert_memory_address (Pmode
, srcb
);
4855 /* "Dereference" to BLKmode memories. */
4856 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4857 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4858 set_mem_align (dstb
, TYPE_ALIGN (va_list_type_node
));
4859 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4860 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4861 set_mem_align (srcb
, TYPE_ALIGN (va_list_type_node
));
4864 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4870 /* Expand a call to one of the builtin functions __builtin_frame_address or
4871 __builtin_return_address. */
4874 expand_builtin_frame_address (tree fndecl
, tree exp
)
4876 /* The argument must be a nonnegative integer constant.
4877 It counts the number of frames to scan up the stack.
4878 The value is the return address saved in that frame. */
4879 if (call_expr_nargs (exp
) == 0)
4880 /* Warning about missing arg was already issued. */
4882 else if (! host_integerp (CALL_EXPR_ARG (exp
, 0), 1))
4884 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4885 error ("invalid argument to %<__builtin_frame_address%>");
4887 error ("invalid argument to %<__builtin_return_address%>");
4893 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4894 tree_low_cst (CALL_EXPR_ARG (exp
, 0), 1));
4896 /* Some ports cannot access arbitrary stack frames. */
4899 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4900 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4902 warning (0, "unsupported argument to %<__builtin_return_address%>");
4906 /* For __builtin_frame_address, return what we've got. */
4907 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4911 && ! CONSTANT_P (tem
))
4912 tem
= copy_to_mode_reg (Pmode
, tem
);
4917 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if
4918 we failed and the caller should emit a normal call, otherwise try to get
4919 the result in TARGET, if convenient. */
4922 expand_builtin_alloca (tree exp
, rtx target
)
4927 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
4928 should always expand to function calls. These can be intercepted
4933 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4936 /* Compute the argument. */
4937 op0
= expand_normal (CALL_EXPR_ARG (exp
, 0));
4939 /* Allocate the desired space. */
4940 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
4941 result
= convert_memory_address (ptr_mode
, result
);
4946 /* Expand a call to a bswap builtin with argument ARG0. MODE
4947 is the mode to expand with. */
4950 expand_builtin_bswap (tree exp
, rtx target
, rtx subtarget
)
4952 enum machine_mode mode
;
4956 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4959 arg
= CALL_EXPR_ARG (exp
, 0);
4960 mode
= TYPE_MODE (TREE_TYPE (arg
));
4961 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
4963 target
= expand_unop (mode
, bswap_optab
, op0
, target
, 1);
4965 gcc_assert (target
);
4967 return convert_to_mode (mode
, target
, 0);
4970 /* Expand a call to a unary builtin in EXP.
4971 Return NULL_RTX if a normal call should be emitted rather than expanding the
4972 function in-line. If convenient, the result should be placed in TARGET.
4973 SUBTARGET may be used as the target for computing one of EXP's operands. */
4976 expand_builtin_unop (enum machine_mode target_mode
, tree exp
, rtx target
,
4977 rtx subtarget
, optab op_optab
)
4981 if (!validate_arglist (exp
, INTEGER_TYPE
, VOID_TYPE
))
4984 /* Compute the argument. */
4985 op0
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
, VOIDmode
, 0);
4986 /* Compute op, into TARGET if possible.
4987 Set TARGET to wherever the result comes back. */
4988 target
= expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 0))),
4989 op_optab
, op0
, target
, 1);
4990 gcc_assert (target
);
4992 return convert_to_mode (target_mode
, target
, 0);
4995 /* If the string passed to fputs is a constant and is one character
4996 long, we attempt to transform this call into __builtin_fputc(). */
4999 expand_builtin_fputs (tree exp
, rtx target
, bool unlocked
)
5001 /* Verify the arguments in the original call. */
5002 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
5004 tree result
= fold_builtin_fputs (CALL_EXPR_ARG (exp
, 0),
5005 CALL_EXPR_ARG (exp
, 1),
5006 (target
== const0_rtx
),
5007 unlocked
, NULL_TREE
);
5009 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
/* Expand a call to __builtin_expect.  We just return our argument,
   as the __builtin_expect semantics should already have been handled
   by the tree branch prediction pass.  */
5019 expand_builtin_expect (tree exp
, rtx target
)
5023 if (call_expr_nargs (exp
) < 2)
5025 arg
= CALL_EXPR_ARG (exp
, 0);
5026 c
= CALL_EXPR_ARG (exp
, 1);
5028 target
= expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5029 /* When guessing was done, the hints should be already stripped away. */
5030 gcc_assert (!flag_guess_branch_prob
);
5035 expand_builtin_trap (void)
5039 emit_insn (gen_trap ());
5042 emit_library_call (abort_libfunc
, LCT_NORETURN
, VOIDmode
, 0);
5046 /* Expand EXP, a call to fabs, fabsf or fabsl.
5047 Return NULL_RTX if a normal call should be emitted rather than expanding
5048 the function inline. If convenient, the result should be placed
5049 in TARGET. SUBTARGET may be used as the target for computing
5053 expand_builtin_fabs (tree exp
, rtx target
, rtx subtarget
)
5055 enum machine_mode mode
;
5059 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5062 arg
= CALL_EXPR_ARG (exp
, 0);
5063 mode
= TYPE_MODE (TREE_TYPE (arg
));
5064 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
5065 return expand_abs (mode
, op0
, target
, 0, safe_from_p (target
, arg
, 1));
/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */
5074 expand_builtin_copysign (tree exp
, rtx target
, rtx subtarget
)
5079 if (!validate_arglist (exp
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
5082 arg
= CALL_EXPR_ARG (exp
, 0);
5083 op0
= expand_expr (arg
, subtarget
, VOIDmode
, EXPAND_NORMAL
);
5085 arg
= CALL_EXPR_ARG (exp
, 1);
5086 op1
= expand_normal (arg
);
5088 return expand_copysign (op0
, op1
, target
);
5091 /* Create a new constant string literal and return a char* pointer to it.
5092 The STRING_CST value is the LEN characters at STR. */
5094 build_string_literal (int len
, const char *str
)
5096 tree t
, elem
, index
, type
;
5098 t
= build_string (len
, str
);
5099 elem
= build_type_variant (char_type_node
, 1, 0);
5100 index
= build_index_type (build_int_cst (NULL_TREE
, len
- 1));
5101 type
= build_array_type (elem
, index
);
5102 TREE_TYPE (t
) = type
;
5103 TREE_CONSTANT (t
) = 1;
5104 TREE_INVARIANT (t
) = 1;
5105 TREE_READONLY (t
) = 1;
5106 TREE_STATIC (t
) = 1;
5108 type
= build_pointer_type (type
);
5109 t
= build1 (ADDR_EXPR
, type
, t
);
5111 type
= build_pointer_type (elem
);
5112 t
= build1 (NOP_EXPR
, type
, t
);
5116 /* Expand EXP, a call to printf or printf_unlocked.
5117 Return NULL_RTX if a normal call should be emitted rather than transforming
5118 the function inline. If convenient, the result should be placed in
5119 TARGET with mode MODE. UNLOCKED indicates this is a printf_unlocked
5122 expand_builtin_printf (tree exp
, rtx target
, enum machine_mode mode
,
5125 /* If we're using an unlocked function, assume the other unlocked
5126 functions exist explicitly. */
5127 tree
const fn_putchar
= unlocked
? built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
]
5128 : implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
5129 tree
const fn_puts
= unlocked
? built_in_decls
[BUILT_IN_PUTS_UNLOCKED
]
5130 : implicit_built_in_decls
[BUILT_IN_PUTS
];
5131 const char *fmt_str
;
5134 int nargs
= call_expr_nargs (exp
);
5136 /* If the return value is used, don't do the transformation. */
5137 if (target
!= const0_rtx
)
5140 /* Verify the required arguments in the original call. */
5143 fmt
= CALL_EXPR_ARG (exp
, 0);
5144 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5147 /* Check whether the format is a literal string constant. */
5148 fmt_str
= c_getstr (fmt
);
5149 if (fmt_str
== NULL
)
5152 if (!init_target_chars ())
5155 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
5156 if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
5159 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))))
5162 fn
= build_call_expr (fn_puts
, 1, CALL_EXPR_ARG (exp
, 1));
5164 /* If the format specifier was "%c", call __builtin_putchar(arg). */
5165 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5168 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 1))) != INTEGER_TYPE
)
5171 fn
= build_call_expr (fn_putchar
, 1, CALL_EXPR_ARG (exp
, 1));
5175 /* We can't handle anything else with % args or %% ... yet. */
5176 if (strchr (fmt_str
, target_percent
))
5182 /* If the format specifier was "", printf does nothing. */
5183 if (fmt_str
[0] == '\0')
5185 /* If the format specifier has length of 1, call putchar. */
5186 if (fmt_str
[1] == '\0')
5188 /* Given printf("c"), (where c is any one character,)
5189 convert "c"[0] to an int and pass that to the replacement
5191 arg
= build_int_cst (NULL_TREE
, fmt_str
[0]);
5193 fn
= build_call_expr (fn_putchar
, 1, arg
);
5197 /* If the format specifier was "string\n", call puts("string"). */
5198 size_t len
= strlen (fmt_str
);
5199 if ((unsigned char)fmt_str
[len
- 1] == target_newline
)
5201 /* Create a NUL-terminated string that's one char shorter
5202 than the original, stripping off the trailing '\n'. */
5203 char *newstr
= alloca (len
);
5204 memcpy (newstr
, fmt_str
, len
- 1);
5205 newstr
[len
- 1] = 0;
5206 arg
= build_string_literal (len
, newstr
);
5208 fn
= build_call_expr (fn_puts
, 1, arg
);
5211 /* We'd like to arrange to call fputs(string,stdout) here,
5212 but we need stdout and don't have a way to get it yet. */
5219 if (TREE_CODE (fn
) == CALL_EXPR
)
5220 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5221 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
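
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the source-level rewrites performed above when
   the result of printf is unused and the format string is a literal.  The
   helper name is hypothetical.  */
#if 0
#include <stdio.h>

static void
printf_rewrite_examples (const char *s, int c)
{
  puts (s);		/* printf ("%s\n", s)  */
  putchar (c);		/* printf ("%c", c)    */
  putchar ('x');	/* printf ("x")        */
  puts ("hello");	/* printf ("hello\n")  */
}
#endif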
5224 /* Expand EXP, a call to fprintf or fprintf_unlocked.
5225 Return NULL_RTX if a normal call should be emitted rather than transforming
5226 the function inline. If convenient, the result should be placed in
5227 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
5230 expand_builtin_fprintf (tree exp
, rtx target
, enum machine_mode mode
,
5233 /* If we're using an unlocked function, assume the other unlocked
5234 functions exist explicitly. */
5235 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
5236 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
5237 tree
const fn_fputs
= unlocked
? built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
]
5238 : implicit_built_in_decls
[BUILT_IN_FPUTS
];
5239 const char *fmt_str
;
5242 int nargs
= call_expr_nargs (exp
);
5244 /* If the return value is used, don't do the transformation. */
5245 if (target
!= const0_rtx
)
5248 /* Verify the required arguments in the original call. */
5251 fp
= CALL_EXPR_ARG (exp
, 0);
5252 if (! POINTER_TYPE_P (TREE_TYPE (fp
)))
5254 fmt
= CALL_EXPR_ARG (exp
, 1);
5255 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5258 /* Check whether the format is a literal string constant. */
5259 fmt_str
= c_getstr (fmt
);
5260 if (fmt_str
== NULL
)
5263 if (!init_target_chars ())
5266 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
5267 if (strcmp (fmt_str
, target_percent_s
) == 0)
5270 || ! POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))))
5272 arg
= CALL_EXPR_ARG (exp
, 2);
5274 fn
= build_call_expr (fn_fputs
, 2, arg
, fp
);
5276 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
5277 else if (strcmp (fmt_str
, target_percent_c
) == 0)
5280 || TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (exp
, 2))) != INTEGER_TYPE
)
5282 arg
= CALL_EXPR_ARG (exp
, 2);
5284 fn
= build_call_expr (fn_fputc
, 2, arg
, fp
);
5288 /* We can't handle anything else with % args or %% ... yet. */
5289 if (strchr (fmt_str
, target_percent
))
5295 /* If the format specifier was "", fprintf does nothing. */
5296 if (fmt_str
[0] == '\0')
5298 /* Evaluate and ignore FILE* argument for side-effects. */
5299 expand_expr (fp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5303 /* When "string" doesn't contain %, replace all cases of
5304 fprintf(stream,string) with fputs(string,stream). The fputs
5305 builtin will take care of special cases like length == 1. */
5307 fn
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
5312 if (TREE_CODE (fn
) == CALL_EXPR
)
5313 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
5314 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
5317 /* Expand a call EXP to sprintf. Return NULL_RTX if
5318 a normal call should be emitted rather than expanding the function
5319 inline. If convenient, the result should be placed in TARGET with
5323 expand_builtin_sprintf (tree exp
, rtx target
, enum machine_mode mode
)
5326 const char *fmt_str
;
5327 int nargs
= call_expr_nargs (exp
);
5329 /* Verify the required arguments in the original call. */
5332 dest
= CALL_EXPR_ARG (exp
, 0);
5333 if (! POINTER_TYPE_P (TREE_TYPE (dest
)))
  fmt = CALL_EXPR_ARG (exp, 1);
5336 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
5339 /* Check whether the format is a literal string constant. */
5340 fmt_str
= c_getstr (fmt
);
5341 if (fmt_str
== NULL
)
5344 if (!init_target_chars ())
5347 /* If the format doesn't contain % args or %%, use strcpy. */
5348 if (strchr (fmt_str
, target_percent
) == 0)
5350 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5353 if ((nargs
> 2) || ! fn
)
5355 expand_expr (build_call_expr (fn
, 2, dest
, fmt
),
5356 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5357 if (target
== const0_rtx
)
5359 exp
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
5360 return expand_expr (exp
, target
, mode
, EXPAND_NORMAL
);
5362 /* If the format is "%s", use strcpy if the result isn't used. */
5363 else if (strcmp (fmt_str
, target_percent_s
) == 0)
5366 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
5372 arg
= CALL_EXPR_ARG (exp
, 2);
5373 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
5376 if (target
!= const0_rtx
)
5378 len
= c_strlen (arg
, 1);
5379 if (! len
|| TREE_CODE (len
) != INTEGER_CST
)
5385 expand_expr (build_call_expr (fn
, 2, dest
, arg
),
5386 const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
5388 if (target
== const0_rtx
)
5390 return expand_expr (len
, target
, mode
, EXPAND_NORMAL
);
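
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the source-level rewrites performed above.  A
   literal format without '%' becomes a strcpy whose value is the format's
   length; "%s" becomes a strcpy of the argument, usable only when the
   result is ignored or the argument's length is a known constant.  The
   helper name is hypothetical.  */
#if 0
#include <string.h>

static void
sprintf_rewrite_examples (char *d, const char *s)
{
  strcpy (d, "abc");	/* sprintf (d, "abc"); known value 3 = strlen ("abc") */
  strcpy (d, s);	/* sprintf (d, "%s", s); value usable only if
			   strlen (s) is a known constant */
}
#endif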
5396 /* Expand a call to either the entry or exit function profiler. */
5399 expand_builtin_profile_func (bool exitp
)
5403 this = DECL_RTL (current_function_decl
);
5404 gcc_assert (MEM_P (this));
5405 this = XEXP (this, 0);
5408 which
= profile_function_exit_libfunc
;
5410 which
= profile_function_entry_libfunc
;
5412 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this, Pmode
,
5413 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
5420 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5423 round_trampoline_addr (rtx tramp
)
5425 rtx temp
, addend
, mask
;
5427 /* If we don't need too much alignment, we'll have been guaranteed
5428 proper alignment by get_trampoline_type. */
5429 if (TRAMPOLINE_ALIGNMENT
<= STACK_BOUNDARY
)
5432 /* Round address up to desired boundary. */
5433 temp
= gen_reg_rtx (Pmode
);
5434 addend
= GEN_INT (TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
- 1);
5435 mask
= GEN_INT (-TRAMPOLINE_ALIGNMENT
/ BITS_PER_UNIT
);
5437 temp
= expand_simple_binop (Pmode
, PLUS
, tramp
, addend
,
5438 temp
, 0, OPTAB_LIB_WIDEN
);
5439 tramp
= expand_simple_binop (Pmode
, AND
, temp
, mask
,
5440 temp
, 0, OPTAB_LIB_WIDEN
);
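
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the round-up that the PLUS/AND pair above
   computes, shown on host integers.  ALIGN must be a power of two
   (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT here); the name is hypothetical.  */
#if 0
#include <stdint.h>

static uintptr_t
round_up_example (uintptr_t addr, uintptr_t align)
{
  return (addr + align - 1) & ~(align - 1);
}
#endif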
5446 expand_builtin_init_trampoline (tree exp
)
5448 tree t_tramp
, t_func
, t_chain
;
5449 rtx r_tramp
, r_func
, r_chain
;
5450 #ifdef TRAMPOLINE_TEMPLATE
5454 if (!validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
,
5455 POINTER_TYPE
, VOID_TYPE
))
5458 t_tramp
= CALL_EXPR_ARG (exp
, 0);
5459 t_func
= CALL_EXPR_ARG (exp
, 1);
5460 t_chain
= CALL_EXPR_ARG (exp
, 2);
5462 r_tramp
= expand_normal (t_tramp
);
5463 r_func
= expand_normal (t_func
);
5464 r_chain
= expand_normal (t_chain
);
5466 /* Generate insns to initialize the trampoline. */
5467 r_tramp
= round_trampoline_addr (r_tramp
);
5468 #ifdef TRAMPOLINE_TEMPLATE
5469 blktramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5470 set_mem_align (blktramp
, TRAMPOLINE_ALIGNMENT
);
5471 emit_block_move (blktramp
, assemble_trampoline_template (),
5472 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
5474 trampolines_created
= 1;
5475 INITIALIZE_TRAMPOLINE (r_tramp
, r_func
, r_chain
);
5481 expand_builtin_adjust_trampoline (tree exp
)
5485 if (!validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
5488 tramp
= expand_normal (CALL_EXPR_ARG (exp
, 0));
5489 tramp
= round_trampoline_addr (tramp
);
5490 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5491 TRAMPOLINE_ADJUST_ADDRESS (tramp
);
5497 /* Expand a call to the built-in signbit, signbitf or signbitl function.
5498 Return NULL_RTX if a normal call should be emitted rather than expanding
5499 the function in-line. EXP is the expression that is a call to the builtin
5500 function; if convenient, the result should be placed in TARGET. */
5503 expand_builtin_signbit (tree exp
, rtx target
)
5505 const struct real_format
*fmt
;
5506 enum machine_mode fmode
, imode
, rmode
;
5507 HOST_WIDE_INT hi
, lo
;
5512 if (!validate_arglist (exp
, REAL_TYPE
, VOID_TYPE
))
5515 arg
= CALL_EXPR_ARG (exp
, 0);
5516 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5517 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5518 fmt
= REAL_MODE_FORMAT (fmode
);
5520 /* For floating point formats without a sign bit, implement signbit
5522 bitpos
= fmt
->signbit_ro
;
5525 /* But we can't do this if the format supports signed zero. */
5526 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5529 arg
= fold_build2 (LT_EXPR
, TREE_TYPE (exp
), arg
,
5530 build_real (TREE_TYPE (arg
), dconst0
));
5531 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5534 temp
= expand_normal (arg
);
5535 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5537 imode
= int_mode_for_mode (fmode
);
5538 if (imode
== BLKmode
)
5540 temp
= gen_lowpart (imode
, temp
);
5545 /* Handle targets with different FP word orders. */
5546 if (FLOAT_WORDS_BIG_ENDIAN
)
5547 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5549 word
= bitpos
/ BITS_PER_WORD
;
5550 temp
= operand_subword_force (temp
, word
, fmode
);
5551 bitpos
= bitpos
% BITS_PER_WORD
;
5554 /* Force the intermediate word_mode (or narrower) result into a
5555 register. This avoids attempting to create paradoxical SUBREGs
5556 of floating point modes below. */
5557 temp
= force_reg (imode
, temp
);
  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */
5563 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5565 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
5568 lo
= (HOST_WIDE_INT
) 1 << bitpos
;
5572 hi
= (HOST_WIDE_INT
) 1 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
5577 temp
= gen_lowpart (rmode
, temp
);
5578 temp
= expand_binop (rmode
, and_optab
, temp
,
5579 immed_double_const (lo
, hi
, rmode
),
5580 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5584 /* Perform a logical right shift to place the signbit in the least
5585 significant bit, then truncate the result to the desired mode
5586 and mask just this bit. */
5587 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5588 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5589 temp
= gen_lowpart (rmode
, temp
);
5590 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5591 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
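
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): the bit test the code above performs in RTL,
   shown for IEEE single precision where the sign occupies the top bit of a
   32-bit word.  The helper name is hypothetical.  */
#if 0
#include <stdint.h>
#include <string.h>

static int
signbit_float_example (float x)
{
  uint32_t bits;
  memcpy (&bits, &x, sizeof bits);	/* view the float as an integer word */
  return (bits >> 31) & 1;		/* shift-and-mask form */
}
#endif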
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the identifier of the actual
   function.  IGNORE is nonzero if the value is to be ignored.  */
5603 expand_builtin_fork_or_exec (tree fn
, tree exp
, rtx target
, int ignore
)
5608 /* If we are not profiling, just call the function. */
5609 if (!profile_arc_flag
)
5612 /* Otherwise call the wrapper. This should be equivalent for the rest of
5613 compiler, so the code does not diverge, and the wrapper may run the
5614 code necessary for keeping the profiling sane. */
5616 switch (DECL_FUNCTION_CODE (fn
))
5619 id
= get_identifier ("__gcov_fork");
5622 case BUILT_IN_EXECL
:
5623 id
= get_identifier ("__gcov_execl");
5626 case BUILT_IN_EXECV
:
5627 id
= get_identifier ("__gcov_execv");
5630 case BUILT_IN_EXECLP
:
5631 id
= get_identifier ("__gcov_execlp");
5634 case BUILT_IN_EXECLE
:
5635 id
= get_identifier ("__gcov_execle");
5638 case BUILT_IN_EXECVP
:
5639 id
= get_identifier ("__gcov_execvp");
5642 case BUILT_IN_EXECVE
:
5643 id
= get_identifier ("__gcov_execve");
5650 decl
= build_decl (FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5651 DECL_EXTERNAL (decl
) = 1;
5652 TREE_PUBLIC (decl
) = 1;
5653 DECL_ARTIFICIAL (decl
) = 1;
5654 TREE_NOTHROW (decl
) = 1;
5655 DECL_VISIBILITY (decl
) = VISIBILITY_DEFAULT
;
5656 DECL_VISIBILITY_SPECIFIED (decl
) = 1;
5657 call
= rewrite_call_expr (exp
, 0, decl
, 0);
5658 return expand_call (call
, target
, ignore
);
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline enum machine_mode
get_builtin_sync_mode (int fcode_diff)
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
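
/* Illustrative sketch (added for exposition, kept under #if 0 so it is not
   compiled into this file): FCODE_DIFF is the log2 of the access size in
   bytes, so the __sync_*_1/_2/_4/_8/_16 variants map to 1-, 2-, 4-, 8- and
   16-byte integer modes respectively.  The helper name is hypothetical.  */
#if 0
static int
sync_size_example (int fcode_diff)
{
  return 1 << fcode_diff;	/* bytes; BITS_PER_UNIT << fcode_diff bits */
}
#endif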
5679 /* Expand the memory expression LOC and return the appropriate memory operand
5680 for the builtin_sync operations. */
5683 get_builtin_sync_mem (tree loc
, enum machine_mode mode
)
5687 addr
= expand_expr (loc
, NULL
, Pmode
, EXPAND_SUM
);
5689 /* Note that we explicitly do not want any alias information for this
5690 memory, so that we kill all other live memories. Otherwise we don't
5691 satisfy the full barrier semantics of the intrinsic. */
5692 mem
= validize_mem (gen_rtx_MEM (mode
, addr
));
5694 set_mem_align (mem
, get_pointer_alignment (loc
, BIGGEST_ALIGNMENT
));
5695 set_mem_alias_set (mem
, ALIAS_SET_MEMORY_BARRIER
);
5696 MEM_VOLATILE_P (mem
) = 1;
5701 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5702 EXP is the CALL_EXPR. CODE is the rtx code
5703 that corresponds to the arithmetic or logical operation from the name;
5704 an exception here is that NOT actually means NAND. TARGET is an optional
5705 place for us to store the results; AFTER is true if this is the
5706 fetch_and_xxx form. IGNORE is true if we don't actually care about
5707 the result of the operation at all. */
5710 expand_builtin_sync_operation (enum machine_mode mode
, tree exp
,
5711 enum rtx_code code
, bool after
,
5712 rtx target
, bool ignore
)
5716 /* Expand the operands. */
5717 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5719 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL
, mode
, EXPAND_NORMAL
);
5720 /* If VAL is promoted to a wider mode, convert it back to MODE. */
5721 val
= convert_to_mode (mode
, val
, 1);
5724 return expand_sync_operation (mem
, val
, code
);
5726 return expand_sync_fetch_operation (mem
, val
, code
, after
, target
);
5729 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5730 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5731 true if this is the boolean form. TARGET is a place for us to store the
5732 results; this is NOT optional if IS_BOOL is true. */
5735 expand_builtin_compare_and_swap (enum machine_mode mode
, tree exp
,
5736 bool is_bool
, rtx target
)
5738 rtx old_val
, new_val
, mem
;
5740 /* Expand the operands. */
5741 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5744 old_val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL
, mode
, EXPAND_NORMAL
);
5745 /* If OLD_VAL is promoted to a wider mode, convert it back to MODE. */
5746 old_val
= convert_to_mode (mode
, old_val
, 1);
5748 new_val
= expand_expr (CALL_EXPR_ARG (exp
, 2), NULL
, mode
, EXPAND_NORMAL
);
5749 /* If NEW_VAL is promoted to a wider mode, convert it back to MODE. */
5750 new_val
= convert_to_mode (mode
, new_val
, 1);
5753 return expand_bool_compare_and_swap (mem
, old_val
, new_val
, target
);
5755 return expand_val_compare_and_swap (mem
, old_val
, new_val
, target
);
5758 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5759 general form is actually an atomic exchange, and some targets only
5760 support a reduced form with the second argument being a constant 1.
5761 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5765 expand_builtin_lock_test_and_set (enum machine_mode mode
, tree exp
,
5770 /* Expand the operands. */
5771 mem
= get_builtin_sync_mem (CALL_EXPR_ARG (exp
, 0), mode
);
5772 val
= expand_expr (CALL_EXPR_ARG (exp
, 1), NULL
, mode
, EXPAND_NORMAL
);
5773 /* If VAL is promoted to a wider mode, convert it back to MODE. */
5774 val
= convert_to_mode (mode
, val
, 1);
5776 return expand_sync_lock_test_and_set (mem
, val
, target
);
/* Expand the __sync_synchronize intrinsic.  */

static void
expand_builtin_synchronize (void)
{
  tree x;

#ifdef HAVE_memory_barrier
  if (HAVE_memory_barrier)
    {
      emit_insn (gen_memory_barrier ());
      return;
    }
#endif

  /* If no explicit memory barrier instruction is available, create an
     empty asm stmt with a memory clobber.  */
  x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
              tree_cons (NULL, build_string (6, "memory"), NULL));
  ASM_VOLATILE_P (x) = 1;

  expand_asm_expr (x);
}
/* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */

static void
expand_builtin_lock_release (enum machine_mode mode, tree exp)
{
  enum insn_code icode;
  rtx mem, insn;
  rtx val = const0_rtx;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* If there is an explicit operation in the md file, use it.  */
  icode = sync_lock_release[mode];
  if (icode != CODE_FOR_nothing)
    {
      if (!insn_data[icode].operand[1].predicate (val, mode))
        val = force_reg (mode, val);

      insn = GEN_FCN (icode) (mem, val);
      if (insn)
        {
          emit_insn (insn);
          return;
        }
    }

  /* Otherwise we can implement this operation by emitting a barrier
     followed by a store of zero.  */
  expand_builtin_synchronize ();
  emit_move_insn (mem, val);
}
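/* Illustrative sketch (not part of this file): the usual pairing of the
   two intrinsics expanded above, a minimal spinlock.  Names are
   hypothetical.

     static volatile int lock;

     void acquire (void)
     {
       while (__sync_lock_test_and_set (&lock, 1))   // acquire semantics
         ;                                           // spin
     }

     void release (void)
     {
       __sync_lock_release (&lock);                  // stores 0 with release semantics
     }

   On targets without a sync_lock_release pattern, the release is emitted
   as __sync_synchronize followed by a plain store of zero, as coded
   above.  */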
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
                int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize
      && !called_as_built_in (fndecl)
      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
      && fcode != BUILT_IN_ALLOCA)
    return expand_call (exp, target, ignore);

  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
    {
      bool volatilep = false;
      tree arg;
      call_expr_arg_iterator iter;

      FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
        if (TREE_THIS_VOLATILE (arg))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
            expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

  switch (fcode)
    {
5892 CASE_FLT_FN (BUILT_IN_FABS
):
5893 target
= expand_builtin_fabs (exp
, target
, subtarget
);
5898 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
5899 target
= expand_builtin_copysign (exp
, target
, subtarget
);
5904 /* Just do a normal library call if we were unable to fold
5906 CASE_FLT_FN (BUILT_IN_CABS
):
5909 CASE_FLT_FN (BUILT_IN_EXP
):
5910 CASE_FLT_FN (BUILT_IN_EXP10
):
5911 CASE_FLT_FN (BUILT_IN_POW10
):
5912 CASE_FLT_FN (BUILT_IN_EXP2
):
5913 CASE_FLT_FN (BUILT_IN_EXPM1
):
5914 CASE_FLT_FN (BUILT_IN_LOGB
):
5915 CASE_FLT_FN (BUILT_IN_LOG
):
5916 CASE_FLT_FN (BUILT_IN_LOG10
):
5917 CASE_FLT_FN (BUILT_IN_LOG2
):
5918 CASE_FLT_FN (BUILT_IN_LOG1P
):
5919 CASE_FLT_FN (BUILT_IN_TAN
):
5920 CASE_FLT_FN (BUILT_IN_ASIN
):
5921 CASE_FLT_FN (BUILT_IN_ACOS
):
5922 CASE_FLT_FN (BUILT_IN_ATAN
):
5923 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5924 because of possible accuracy problems. */
5925 if (! flag_unsafe_math_optimizations
)
5927 CASE_FLT_FN (BUILT_IN_SQRT
):
5928 CASE_FLT_FN (BUILT_IN_FLOOR
):
5929 CASE_FLT_FN (BUILT_IN_CEIL
):
5930 CASE_FLT_FN (BUILT_IN_TRUNC
):
5931 CASE_FLT_FN (BUILT_IN_ROUND
):
5932 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
5933 CASE_FLT_FN (BUILT_IN_RINT
):
5934 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5939 CASE_FLT_FN (BUILT_IN_ILOGB
):
5940 if (! flag_unsafe_math_optimizations
)
5942 CASE_FLT_FN (BUILT_IN_ISINF
):
5943 target
= expand_builtin_interclass_mathfn (exp
, target
, subtarget
);
5948 CASE_FLT_FN (BUILT_IN_LCEIL
):
5949 CASE_FLT_FN (BUILT_IN_LLCEIL
):
5950 CASE_FLT_FN (BUILT_IN_LFLOOR
):
5951 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
5952 target
= expand_builtin_int_roundingfn (exp
, target
, subtarget
);
5957 CASE_FLT_FN (BUILT_IN_LRINT
):
5958 CASE_FLT_FN (BUILT_IN_LLRINT
):
5959 CASE_FLT_FN (BUILT_IN_LROUND
):
5960 CASE_FLT_FN (BUILT_IN_LLROUND
):
5961 target
= expand_builtin_int_roundingfn_2 (exp
, target
, subtarget
);
5966 CASE_FLT_FN (BUILT_IN_POW
):
5967 target
= expand_builtin_pow (exp
, target
, subtarget
);
5972 CASE_FLT_FN (BUILT_IN_POWI
):
5973 target
= expand_builtin_powi (exp
, target
, subtarget
);
5978 CASE_FLT_FN (BUILT_IN_ATAN2
):
5979 CASE_FLT_FN (BUILT_IN_LDEXP
):
5980 CASE_FLT_FN (BUILT_IN_SCALB
):
5981 CASE_FLT_FN (BUILT_IN_SCALBN
):
5982 CASE_FLT_FN (BUILT_IN_SCALBLN
):
5983 if (! flag_unsafe_math_optimizations
)
5986 CASE_FLT_FN (BUILT_IN_FMOD
):
5987 CASE_FLT_FN (BUILT_IN_REMAINDER
):
5988 CASE_FLT_FN (BUILT_IN_DREM
):
5989 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5994 CASE_FLT_FN (BUILT_IN_CEXPI
):
5995 target
= expand_builtin_cexpi (exp
, target
, subtarget
);
5996 gcc_assert (target
);
5999 CASE_FLT_FN (BUILT_IN_SIN
):
6000 CASE_FLT_FN (BUILT_IN_COS
):
6001 if (! flag_unsafe_math_optimizations
)
6003 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
6008 CASE_FLT_FN (BUILT_IN_SINCOS
):
6009 if (! flag_unsafe_math_optimizations
)
6011 target
= expand_builtin_sincos (exp
);
6016 case BUILT_IN_APPLY_ARGS
:
6017 return expand_builtin_apply_args ();
6019 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6020 FUNCTION with a copy of the parameters described by
6021 ARGUMENTS, and ARGSIZE. It returns a block of memory
6022 allocated on the stack into which is stored all the registers
6023 that might possibly be used for returning the result of a
6024 function. ARGUMENTS is the value returned by
6025 __builtin_apply_args. ARGSIZE is the number of bytes of
6026 arguments that must be copied. ??? How should this value be
6027 computed? We'll also need a safe worst case value for varargs
6029 case BUILT_IN_APPLY
:
6030 if (!validate_arglist (exp
, POINTER_TYPE
,
6031 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
6032 && !validate_arglist (exp
, REFERENCE_TYPE
,
6033 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6039 ops
[0] = expand_normal (CALL_EXPR_ARG (exp
, 0));
6040 ops
[1] = expand_normal (CALL_EXPR_ARG (exp
, 1));
6041 ops
[2] = expand_normal (CALL_EXPR_ARG (exp
, 2));
6043 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
6046 /* __builtin_return (RESULT) causes the function to return the
6047 value described by RESULT. RESULT is address of the block of
6048 memory returned by __builtin_apply. */
6049 case BUILT_IN_RETURN
:
6050 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6051 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp
, 0)));
6054 case BUILT_IN_SAVEREGS
:
6055 return expand_builtin_saveregs ();
6057 case BUILT_IN_ARGS_INFO
:
6058 return expand_builtin_args_info (exp
);
6060 /* Return the address of the first anonymous stack arg. */
6061 case BUILT_IN_NEXT_ARG
:
6062 if (fold_builtin_next_arg (exp
, false))
6064 return expand_builtin_next_arg ();
6066 case BUILT_IN_CLASSIFY_TYPE
:
6067 return expand_builtin_classify_type (exp
);
6069 case BUILT_IN_CONSTANT_P
:
6072 case BUILT_IN_FRAME_ADDRESS
:
6073 case BUILT_IN_RETURN_ADDRESS
:
6074 return expand_builtin_frame_address (fndecl
, exp
);
6076 /* Returns the address of the area where the structure is returned.
6078 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
6079 if (call_expr_nargs (exp
) != 0
6080 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
6081 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
6084 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
6086 case BUILT_IN_ALLOCA
:
6087 target
= expand_builtin_alloca (exp
, target
);
6092 case BUILT_IN_STACK_SAVE
:
6093 return expand_stack_save ();
6095 case BUILT_IN_STACK_RESTORE
:
6096 expand_stack_restore (CALL_EXPR_ARG (exp
, 0));
6099 case BUILT_IN_BSWAP32
:
6100 case BUILT_IN_BSWAP64
:
6101 target
= expand_builtin_bswap (exp
, target
, subtarget
);
6107 CASE_INT_FN (BUILT_IN_FFS
):
6108 case BUILT_IN_FFSIMAX
:
6109 target
= expand_builtin_unop (target_mode
, exp
, target
,
6110 subtarget
, ffs_optab
);
6115 CASE_INT_FN (BUILT_IN_CLZ
):
6116 case BUILT_IN_CLZIMAX
:
6117 target
= expand_builtin_unop (target_mode
, exp
, target
,
6118 subtarget
, clz_optab
);
6123 CASE_INT_FN (BUILT_IN_CTZ
):
6124 case BUILT_IN_CTZIMAX
:
6125 target
= expand_builtin_unop (target_mode
, exp
, target
,
6126 subtarget
, ctz_optab
);
6131 CASE_INT_FN (BUILT_IN_POPCOUNT
):
6132 case BUILT_IN_POPCOUNTIMAX
:
6133 target
= expand_builtin_unop (target_mode
, exp
, target
,
6134 subtarget
, popcount_optab
);
6139 CASE_INT_FN (BUILT_IN_PARITY
):
6140 case BUILT_IN_PARITYIMAX
:
6141 target
= expand_builtin_unop (target_mode
, exp
, target
,
6142 subtarget
, parity_optab
);
6147 case BUILT_IN_STRLEN
:
6148 target
= expand_builtin_strlen (exp
, target
, target_mode
);
6153 case BUILT_IN_STRCPY
:
6154 target
= expand_builtin_strcpy (fndecl
, exp
, target
, mode
);
6159 case BUILT_IN_STRNCPY
:
6160 target
= expand_builtin_strncpy (exp
, target
, mode
);
6165 case BUILT_IN_STPCPY
:
6166 target
= expand_builtin_stpcpy (exp
, target
, mode
);
6171 case BUILT_IN_STRCAT
:
6172 target
= expand_builtin_strcat (fndecl
, exp
, target
, mode
);
6177 case BUILT_IN_STRNCAT
:
6178 target
= expand_builtin_strncat (exp
, target
, mode
);
6183 case BUILT_IN_STRSPN
:
6184 target
= expand_builtin_strspn (exp
, target
, mode
);
6189 case BUILT_IN_STRCSPN
:
6190 target
= expand_builtin_strcspn (exp
, target
, mode
);
6195 case BUILT_IN_STRSTR
:
6196 target
= expand_builtin_strstr (exp
, target
, mode
);
6201 case BUILT_IN_STRPBRK
:
6202 target
= expand_builtin_strpbrk (exp
, target
, mode
);
6207 case BUILT_IN_INDEX
:
6208 case BUILT_IN_STRCHR
:
6209 target
= expand_builtin_strchr (exp
, target
, mode
);
6214 case BUILT_IN_RINDEX
:
6215 case BUILT_IN_STRRCHR
:
6216 target
= expand_builtin_strrchr (exp
, target
, mode
);
6221 case BUILT_IN_MEMCPY
:
6222 target
= expand_builtin_memcpy (exp
, target
, mode
);
6227 case BUILT_IN_MEMPCPY
:
6228 target
= expand_builtin_mempcpy (exp
, target
, mode
);
6233 case BUILT_IN_MEMMOVE
:
6234 target
= expand_builtin_memmove (exp
, target
, mode
, ignore
);
6239 case BUILT_IN_BCOPY
:
6240 target
= expand_builtin_bcopy (exp
, ignore
);
6245 case BUILT_IN_MEMSET
:
6246 target
= expand_builtin_memset (exp
, target
, mode
);
6251 case BUILT_IN_BZERO
:
6252 target
= expand_builtin_bzero (exp
);
6257 case BUILT_IN_STRCMP
:
6258 target
= expand_builtin_strcmp (exp
, target
, mode
);
6263 case BUILT_IN_STRNCMP
:
6264 target
= expand_builtin_strncmp (exp
, target
, mode
);
6270 case BUILT_IN_MEMCMP
:
6271 target
= expand_builtin_memcmp (exp
, target
, mode
);
6276 case BUILT_IN_SETJMP
:
6277 /* This should have been lowered to the builtins below. */
6280 case BUILT_IN_SETJMP_SETUP
:
6281 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6282 and the receiver label. */
6283 if (validate_arglist (exp
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
6285 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6286 VOIDmode
, EXPAND_NORMAL
);
6287 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 1), 0);
6288 rtx label_r
= label_rtx (label
);
6290 /* This is copied from the handling of non-local gotos. */
6291 expand_builtin_setjmp_setup (buf_addr
, label_r
);
6292 nonlocal_goto_handler_labels
6293 = gen_rtx_EXPR_LIST (VOIDmode
, label_r
,
6294 nonlocal_goto_handler_labels
);
6295 /* ??? Do not let expand_label treat us as such since we would
6296 not want to be both on the list of non-local labels and on
6297 the list of forced labels. */
6298 FORCED_LABEL (label
) = 0;
6303 case BUILT_IN_SETJMP_DISPATCHER
:
6304 /* __builtin_setjmp_dispatcher is passed the dispatcher label. */
6305 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6307 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6308 rtx label_r
= label_rtx (label
);
6310 /* Remove the dispatcher label from the list of non-local labels
6311 since the receiver labels have been added to it above. */
6312 remove_node_from_expr_list (label_r
, &nonlocal_goto_handler_labels
);
6317 case BUILT_IN_SETJMP_RECEIVER
:
6318 /* __builtin_setjmp_receiver is passed the receiver label. */
6319 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6321 tree label
= TREE_OPERAND (CALL_EXPR_ARG (exp
, 0), 0);
6322 rtx label_r
= label_rtx (label
);
6324 expand_builtin_setjmp_receiver (label_r
);
6329 /* __builtin_longjmp is passed a pointer to an array of five words.
6330 It's similar to the C library longjmp function but works with
6331 __builtin_setjmp above. */
6332 case BUILT_IN_LONGJMP
:
6333 if (validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
6335 rtx buf_addr
= expand_expr (CALL_EXPR_ARG (exp
, 0), subtarget
,
6336 VOIDmode
, EXPAND_NORMAL
);
6337 rtx value
= expand_normal (CALL_EXPR_ARG (exp
, 1));
6339 if (value
!= const1_rtx
)
6341 error ("%<__builtin_longjmp%> second argument must be 1");
6345 expand_builtin_longjmp (buf_addr
, value
);
6350 case BUILT_IN_NONLOCAL_GOTO
:
6351 target
= expand_builtin_nonlocal_goto (exp
);
6356 /* This updates the setjmp buffer that is its argument with the value
6357 of the current stack pointer. */
6358 case BUILT_IN_UPDATE_SETJMP_BUF
:
6359 if (validate_arglist (exp
, POINTER_TYPE
, VOID_TYPE
))
6362 = expand_normal (CALL_EXPR_ARG (exp
, 0));
6364 expand_builtin_update_setjmp_buf (buf_addr
);
6370 expand_builtin_trap ();
6373 case BUILT_IN_PRINTF
:
6374 target
= expand_builtin_printf (exp
, target
, mode
, false);
6379 case BUILT_IN_PRINTF_UNLOCKED
:
6380 target
= expand_builtin_printf (exp
, target
, mode
, true);
6385 case BUILT_IN_FPUTS
:
6386 target
= expand_builtin_fputs (exp
, target
, false);
6390 case BUILT_IN_FPUTS_UNLOCKED
:
6391 target
= expand_builtin_fputs (exp
, target
, true);
6396 case BUILT_IN_FPRINTF
:
6397 target
= expand_builtin_fprintf (exp
, target
, mode
, false);
6402 case BUILT_IN_FPRINTF_UNLOCKED
:
6403 target
= expand_builtin_fprintf (exp
, target
, mode
, true);
6408 case BUILT_IN_SPRINTF
:
6409 target
= expand_builtin_sprintf (exp
, target
, mode
);
6414 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
6415 target
= expand_builtin_signbit (exp
, target
);
6420 /* Various hooks for the DWARF 2 __throw routine. */
6421 case BUILT_IN_UNWIND_INIT
:
6422 expand_builtin_unwind_init ();
6424 case BUILT_IN_DWARF_CFA
:
6425 return virtual_cfa_rtx
;
6426 #ifdef DWARF2_UNWIND_INFO
6427 case BUILT_IN_DWARF_SP_COLUMN
:
6428 return expand_builtin_dwarf_sp_column ();
6429 case BUILT_IN_INIT_DWARF_REG_SIZES
:
6430 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp
, 0));
6433 case BUILT_IN_FROB_RETURN_ADDR
:
6434 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp
, 0));
6435 case BUILT_IN_EXTRACT_RETURN_ADDR
:
6436 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp
, 0));
6437 case BUILT_IN_EH_RETURN
:
6438 expand_builtin_eh_return (CALL_EXPR_ARG (exp
, 0),
6439 CALL_EXPR_ARG (exp
, 1));
6441 #ifdef EH_RETURN_DATA_REGNO
6442 case BUILT_IN_EH_RETURN_DATA_REGNO
:
6443 return expand_builtin_eh_return_data_regno (exp
);
6445 case BUILT_IN_EXTEND_POINTER
:
6446 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp
, 0));
6448 case BUILT_IN_VA_START
:
6449 case BUILT_IN_STDARG_START
:
6450 return expand_builtin_va_start (exp
);
6451 case BUILT_IN_VA_END
:
6452 return expand_builtin_va_end (exp
);
6453 case BUILT_IN_VA_COPY
:
6454 return expand_builtin_va_copy (exp
);
6455 case BUILT_IN_EXPECT
:
6456 return expand_builtin_expect (exp
, target
);
6457 case BUILT_IN_PREFETCH
:
6458 expand_builtin_prefetch (exp
);
6461 case BUILT_IN_PROFILE_FUNC_ENTER
:
6462 return expand_builtin_profile_func (false);
6463 case BUILT_IN_PROFILE_FUNC_EXIT
:
6464 return expand_builtin_profile_func (true);
6466 case BUILT_IN_INIT_TRAMPOLINE
:
6467 return expand_builtin_init_trampoline (exp
);
6468 case BUILT_IN_ADJUST_TRAMPOLINE
:
6469 return expand_builtin_adjust_trampoline (exp
);
6472 case BUILT_IN_EXECL
:
6473 case BUILT_IN_EXECV
:
6474 case BUILT_IN_EXECLP
:
6475 case BUILT_IN_EXECLE
:
6476 case BUILT_IN_EXECVP
:
6477 case BUILT_IN_EXECVE
:
6478 target
= expand_builtin_fork_or_exec (fndecl
, exp
, target
, ignore
);
6483 case BUILT_IN_FETCH_AND_ADD_1
:
6484 case BUILT_IN_FETCH_AND_ADD_2
:
6485 case BUILT_IN_FETCH_AND_ADD_4
:
6486 case BUILT_IN_FETCH_AND_ADD_8
:
6487 case BUILT_IN_FETCH_AND_ADD_16
:
6488 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_ADD_1
);
6489 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6490 false, target
, ignore
);
6495 case BUILT_IN_FETCH_AND_SUB_1
:
6496 case BUILT_IN_FETCH_AND_SUB_2
:
6497 case BUILT_IN_FETCH_AND_SUB_4
:
6498 case BUILT_IN_FETCH_AND_SUB_8
:
6499 case BUILT_IN_FETCH_AND_SUB_16
:
6500 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_SUB_1
);
6501 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6502 false, target
, ignore
);
6507 case BUILT_IN_FETCH_AND_OR_1
:
6508 case BUILT_IN_FETCH_AND_OR_2
:
6509 case BUILT_IN_FETCH_AND_OR_4
:
6510 case BUILT_IN_FETCH_AND_OR_8
:
6511 case BUILT_IN_FETCH_AND_OR_16
:
6512 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_OR_1
);
6513 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6514 false, target
, ignore
);
6519 case BUILT_IN_FETCH_AND_AND_1
:
6520 case BUILT_IN_FETCH_AND_AND_2
:
6521 case BUILT_IN_FETCH_AND_AND_4
:
6522 case BUILT_IN_FETCH_AND_AND_8
:
6523 case BUILT_IN_FETCH_AND_AND_16
:
6524 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_AND_1
);
6525 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6526 false, target
, ignore
);
6531 case BUILT_IN_FETCH_AND_XOR_1
:
6532 case BUILT_IN_FETCH_AND_XOR_2
:
6533 case BUILT_IN_FETCH_AND_XOR_4
:
6534 case BUILT_IN_FETCH_AND_XOR_8
:
6535 case BUILT_IN_FETCH_AND_XOR_16
:
6536 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_XOR_1
);
6537 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6538 false, target
, ignore
);
6543 case BUILT_IN_FETCH_AND_NAND_1
:
6544 case BUILT_IN_FETCH_AND_NAND_2
:
6545 case BUILT_IN_FETCH_AND_NAND_4
:
6546 case BUILT_IN_FETCH_AND_NAND_8
:
6547 case BUILT_IN_FETCH_AND_NAND_16
:
6548 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_FETCH_AND_NAND_1
);
6549 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6550 false, target
, ignore
);
6555 case BUILT_IN_ADD_AND_FETCH_1
:
6556 case BUILT_IN_ADD_AND_FETCH_2
:
6557 case BUILT_IN_ADD_AND_FETCH_4
:
6558 case BUILT_IN_ADD_AND_FETCH_8
:
6559 case BUILT_IN_ADD_AND_FETCH_16
:
6560 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_ADD_AND_FETCH_1
);
6561 target
= expand_builtin_sync_operation (mode
, exp
, PLUS
,
6562 true, target
, ignore
);
6567 case BUILT_IN_SUB_AND_FETCH_1
:
6568 case BUILT_IN_SUB_AND_FETCH_2
:
6569 case BUILT_IN_SUB_AND_FETCH_4
:
6570 case BUILT_IN_SUB_AND_FETCH_8
:
6571 case BUILT_IN_SUB_AND_FETCH_16
:
6572 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_SUB_AND_FETCH_1
);
6573 target
= expand_builtin_sync_operation (mode
, exp
, MINUS
,
6574 true, target
, ignore
);
6579 case BUILT_IN_OR_AND_FETCH_1
:
6580 case BUILT_IN_OR_AND_FETCH_2
:
6581 case BUILT_IN_OR_AND_FETCH_4
:
6582 case BUILT_IN_OR_AND_FETCH_8
:
6583 case BUILT_IN_OR_AND_FETCH_16
:
6584 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_OR_AND_FETCH_1
);
6585 target
= expand_builtin_sync_operation (mode
, exp
, IOR
,
6586 true, target
, ignore
);
6591 case BUILT_IN_AND_AND_FETCH_1
:
6592 case BUILT_IN_AND_AND_FETCH_2
:
6593 case BUILT_IN_AND_AND_FETCH_4
:
6594 case BUILT_IN_AND_AND_FETCH_8
:
6595 case BUILT_IN_AND_AND_FETCH_16
:
6596 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_AND_AND_FETCH_1
);
6597 target
= expand_builtin_sync_operation (mode
, exp
, AND
,
6598 true, target
, ignore
);
6603 case BUILT_IN_XOR_AND_FETCH_1
:
6604 case BUILT_IN_XOR_AND_FETCH_2
:
6605 case BUILT_IN_XOR_AND_FETCH_4
:
6606 case BUILT_IN_XOR_AND_FETCH_8
:
6607 case BUILT_IN_XOR_AND_FETCH_16
:
6608 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_XOR_AND_FETCH_1
);
6609 target
= expand_builtin_sync_operation (mode
, exp
, XOR
,
6610 true, target
, ignore
);
6615 case BUILT_IN_NAND_AND_FETCH_1
:
6616 case BUILT_IN_NAND_AND_FETCH_2
:
6617 case BUILT_IN_NAND_AND_FETCH_4
:
6618 case BUILT_IN_NAND_AND_FETCH_8
:
6619 case BUILT_IN_NAND_AND_FETCH_16
:
6620 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_NAND_AND_FETCH_1
);
6621 target
= expand_builtin_sync_operation (mode
, exp
, NOT
,
6622 true, target
, ignore
);
6627 case BUILT_IN_BOOL_COMPARE_AND_SWAP_1
:
6628 case BUILT_IN_BOOL_COMPARE_AND_SWAP_2
:
6629 case BUILT_IN_BOOL_COMPARE_AND_SWAP_4
:
6630 case BUILT_IN_BOOL_COMPARE_AND_SWAP_8
:
6631 case BUILT_IN_BOOL_COMPARE_AND_SWAP_16
:
6632 if (mode
== VOIDmode
)
6633 mode
= TYPE_MODE (boolean_type_node
);
6634 if (!target
|| !register_operand (target
, mode
))
6635 target
= gen_reg_rtx (mode
);
6637 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_BOOL_COMPARE_AND_SWAP_1
);
6638 target
= expand_builtin_compare_and_swap (mode
, exp
, true, target
);
6643 case BUILT_IN_VAL_COMPARE_AND_SWAP_1
:
6644 case BUILT_IN_VAL_COMPARE_AND_SWAP_2
:
6645 case BUILT_IN_VAL_COMPARE_AND_SWAP_4
:
6646 case BUILT_IN_VAL_COMPARE_AND_SWAP_8
:
6647 case BUILT_IN_VAL_COMPARE_AND_SWAP_16
:
6648 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_VAL_COMPARE_AND_SWAP_1
);
6649 target
= expand_builtin_compare_and_swap (mode
, exp
, false, target
);
6654 case BUILT_IN_LOCK_TEST_AND_SET_1
:
6655 case BUILT_IN_LOCK_TEST_AND_SET_2
:
6656 case BUILT_IN_LOCK_TEST_AND_SET_4
:
6657 case BUILT_IN_LOCK_TEST_AND_SET_8
:
6658 case BUILT_IN_LOCK_TEST_AND_SET_16
:
6659 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_TEST_AND_SET_1
);
6660 target
= expand_builtin_lock_test_and_set (mode
, exp
, target
);
6665 case BUILT_IN_LOCK_RELEASE_1
:
6666 case BUILT_IN_LOCK_RELEASE_2
:
6667 case BUILT_IN_LOCK_RELEASE_4
:
6668 case BUILT_IN_LOCK_RELEASE_8
:
6669 case BUILT_IN_LOCK_RELEASE_16
:
6670 mode
= get_builtin_sync_mode (fcode
- BUILT_IN_LOCK_RELEASE_1
);
6671 expand_builtin_lock_release (mode
, exp
);
6674 case BUILT_IN_SYNCHRONIZE
:
6675 expand_builtin_synchronize ();
6678 case BUILT_IN_OBJECT_SIZE
:
6679 return expand_builtin_object_size (exp
);
6681 case BUILT_IN_MEMCPY_CHK
:
6682 case BUILT_IN_MEMPCPY_CHK
:
6683 case BUILT_IN_MEMMOVE_CHK
:
6684 case BUILT_IN_MEMSET_CHK
:
6685 target
= expand_builtin_memory_chk (exp
, target
, mode
, fcode
);
6690 case BUILT_IN_STRCPY_CHK
:
6691 case BUILT_IN_STPCPY_CHK
:
6692 case BUILT_IN_STRNCPY_CHK
:
6693 case BUILT_IN_STRCAT_CHK
:
6694 case BUILT_IN_STRNCAT_CHK
:
6695 case BUILT_IN_SNPRINTF_CHK
:
6696 case BUILT_IN_VSNPRINTF_CHK
:
6697 maybe_emit_chk_warning (exp
, fcode
);
6700 case BUILT_IN_SPRINTF_CHK
:
6701 case BUILT_IN_VSPRINTF_CHK
:
6702 maybe_emit_sprintf_chk_warning (exp
, fcode
);
6705 default: /* just do library call, if unknown builtin */
6709 /* The switch statement above can drop through to cause the function
6710 to be called normally. */
6711 return expand_call (exp
, target
, ignore
);
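/* Illustrative sketch (not part of this file): the default case above
   simply emits an ordinary call.  For example, a printf whose format
   cannot be simplified is left for the C library,

     __builtin_printf ("%s: %d\n", name, value);   // becomes a call to printf

   whereas __builtin_printf ("hello\n") can be rewritten to puts by the
   printf expander handled earlier in the switch.  */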
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (tree t)
{
  tree fndecl, arg, parmlist;
  tree argtype, parmtype;
  call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
         the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
        {
          if (more_call_expr_args_p (&iter))
            return END_BUILTINS;
          return DECL_FUNCTION_CODE (fndecl);
        }

      if (! more_call_expr_args_p (&iter))
        return END_BUILTINS;

      arg = next_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
        {
          if (! SCALAR_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
        {
          if (! COMPLEX_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (POINTER_TYPE_P (parmtype))
        {
          if (! POINTER_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (INTEGRAL_TYPE_P (parmtype))
        {
          if (! INTEGRAL_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else
        return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
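/* Illustrative sketch (not part of this file): given GENERIC for the C
   source

     double d = __builtin_sqrt (x);   // x of type double

   builtin_mathfn_code applied to the call returns BUILT_IN_SQRT, while a
   call whose argument list does not match the builtin's prototype (wrong
   arity or wrong type classes) yields END_BUILTINS.  */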
/* Fold a call to __builtin_constant_p, if we know its argument ARG will
   evaluate to a constant.  */

static tree
fold_builtin_constant_p (tree arg)
{
  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arg);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arg)
      || (TREE_CODE (arg) == CONSTRUCTOR
          && TREE_CONSTANT (arg)))
    return integer_one_node;
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (TREE_CODE (op) == STRING_CST
          || (TREE_CODE (op) == ARRAY_REF
              && integer_zerop (TREE_OPERAND (op, 1))
              && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
        return integer_one_node;
    }

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arg)
      || AGGREGATE_TYPE_P (TREE_TYPE (arg))
      || POINTER_TYPE_P (TREE_TYPE (arg))
      || cfun == 0
      || folding_initializer)
    return integer_zero_node;

  return NULL_TREE;
}
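/* Illustrative sketch (not part of this file): how the folding above
   behaves for typical uses of __builtin_constant_p.

     __builtin_constant_p (42)            // folds to 1
     __builtin_constant_p ("abc")         // folds to 1 (address of a STRING_CST)
     __builtin_constant_p (extern_var)    // folds to 0 in an initializer;
                                          // otherwise left for later passes

   Returning NULL_TREE keeps the call around so that later optimizations
   may still prove the argument constant.  */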
/* Fold a call to __builtin_expect with argument ARG, if we expect that a
   comparison against the argument will fold to a constant.  In practice,
   this means a true constant or the address of a non-weak symbol.  */

static tree
fold_builtin_expect (tree arg)
{
  tree inner;

  /* If the argument isn't invariant, then there's nothing we can do.  */
  if (!TREE_INVARIANT (arg))
    return NULL_TREE;

  /* If we're looking at an address of a weak decl, then do not fold.  */
  inner = arg;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
        {
          inner = TREE_OPERAND (inner, 0);
        }
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      if (DECL_P (inner) && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG already has the proper type for the return value.  */
  return arg;
}
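/* Illustrative sketch (not part of this file): the canonical use of
   __builtin_expect.  The folder above drops the hint when the tested
   value is already a compile-time constant; the address of a weak symbol
   is deliberately not folded, since it may legitimately compare equal to
   null at run time.

     #define likely(x)   __builtin_expect (!!(x), 1)
     #define unlikely(x) __builtin_expect (!!(x), 0)

     if (unlikely (ptr == NULL))
       abort ();
*/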
/* Fold a call to __builtin_classify_type with argument ARG.  */

static tree
fold_builtin_classify_type (tree arg)
{
  if (arg == 0)
    return build_int_cst (NULL_TREE, no_type_class);

  return build_int_cst (NULL_TREE, type_to_class (TREE_TYPE (arg)));
}
/* Fold a call to __builtin_strlen with argument ARG.  */

static tree
fold_builtin_strlen (tree arg)
{
  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (arg, 0);

      if (len)
        {
          /* Convert from the internal "sizetype" type to "size_t".  */
          if (size_type_node)
            len = fold_convert (size_type_node, len);
          return len;
        }

      return NULL_TREE;
    }
}
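/* Illustrative sketch (not part of this file): with a constant string the
   call disappears entirely.

     size_t n = __builtin_strlen ("hello");   // folded to (size_t) 5

   c_strlen computes the length in sizetype; the fold_convert above gives
   the result the user-visible size_t type.  */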
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn ("target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold a call to __builtin_nan or __builtin_nans with argument ARG.  */

static tree
fold_builtin_nan (tree arg, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arg (arg, POINTER_TYPE))
    return NULL_TREE;
  str = c_getstr (arg);
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}
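/* Illustrative sketch (not part of this file): __builtin_nan interprets its
   string argument the same way strtod would interpret the n-char-sequence
   in "NAN(...)".

     double d = __builtin_nan ("");        // default quiet NaN
     float  f = __builtin_nansf ("1");     // signalling NaN with payload 1

   If the string is not a valid n-char-sequence, real_nan fails and the
   call is left unfolded.  */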
6936 /* Return true if the floating point expression T has an integer value.
6937 We also allow +Inf, -Inf and NaN to be considered integer values. */
6940 integer_valued_real_p (tree t
)
6942 switch (TREE_CODE (t
))
6949 case NON_LVALUE_EXPR
:
6950 return integer_valued_real_p (TREE_OPERAND (t
, 0));
6955 return integer_valued_real_p (GENERIC_TREE_OPERAND (t
, 1));
6962 return integer_valued_real_p (TREE_OPERAND (t
, 0))
6963 && integer_valued_real_p (TREE_OPERAND (t
, 1));
6966 return integer_valued_real_p (TREE_OPERAND (t
, 1))
6967 && integer_valued_real_p (TREE_OPERAND (t
, 2));
6970 return real_isinteger (TREE_REAL_CST_PTR (t
), TYPE_MODE (TREE_TYPE (t
)));
6974 tree type
= TREE_TYPE (TREE_OPERAND (t
, 0));
6975 if (TREE_CODE (type
) == INTEGER_TYPE
)
6977 if (TREE_CODE (type
) == REAL_TYPE
)
6978 return integer_valued_real_p (TREE_OPERAND (t
, 0));
6983 switch (builtin_mathfn_code (t
))
6985 CASE_FLT_FN (BUILT_IN_CEIL
):
6986 CASE_FLT_FN (BUILT_IN_FLOOR
):
6987 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
6988 CASE_FLT_FN (BUILT_IN_RINT
):
6989 CASE_FLT_FN (BUILT_IN_ROUND
):
6990 CASE_FLT_FN (BUILT_IN_TRUNC
):
6993 CASE_FLT_FN (BUILT_IN_FMIN
):
6994 CASE_FLT_FN (BUILT_IN_FMAX
):
6995 return integer_valued_real_p (CALL_EXPR_ARG (t
, 0))
6996 && integer_valued_real_p (CALL_EXPR_ARG (t
, 1));
7009 /* FNDECL is assumed to be a builtin where truncation can be propagated
7010 across (for instance floor((double)f) == (double)floorf (f).
7011 Do the transformation for a call with argument ARG. */
7014 fold_trunc_transparent_mathfn (tree fndecl
, tree arg
)
7016 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7018 if (!validate_arg (arg
, REAL_TYPE
))
7021 /* Integer rounding functions are idempotent. */
7022 if (fcode
== builtin_mathfn_code (arg
))
7025 /* If argument is already integer valued, and we don't need to worry
7026 about setting errno, there's no need to perform rounding. */
7027 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7032 tree arg0
= strip_float_extensions (arg
);
7033 tree ftype
= TREE_TYPE (TREE_TYPE (fndecl
));
7034 tree newtype
= TREE_TYPE (arg0
);
7037 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7038 && (decl
= mathfn_built_in (newtype
, fcode
)))
7039 return fold_convert (ftype
,
7040 build_call_expr (decl
, 1,
7041 fold_convert (newtype
, arg0
)));
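/* Illustrative sketch (not part of this file): the narrowing performed by
   fold_trunc_transparent_mathfn.

     float f;
     double d = __builtin_floor ((double) f);
     // becomes: double d = (double) __builtin_floorf (f);

   The result is exact because the floor of a value representable in float
   is itself representable in float.  */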
7046 /* FNDECL is assumed to be builtin which can narrow the FP type of
7047 the argument, for instance lround((double)f) -> lroundf (f).
7048 Do the transformation for a call with argument ARG. */
7051 fold_fixed_mathfn (tree fndecl
, tree arg
)
7053 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
7055 if (!validate_arg (arg
, REAL_TYPE
))
7058 /* If argument is already integer valued, and we don't need to worry
7059 about setting errno, there's no need to perform rounding. */
7060 if (! flag_errno_math
&& integer_valued_real_p (arg
))
7061 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)), arg
);
7065 tree ftype
= TREE_TYPE (arg
);
7066 tree arg0
= strip_float_extensions (arg
);
7067 tree newtype
= TREE_TYPE (arg0
);
7070 if (TYPE_PRECISION (newtype
) < TYPE_PRECISION (ftype
)
7071 && (decl
= mathfn_built_in (newtype
, fcode
)))
7072 return build_call_expr (decl
, 1, fold_convert (newtype
, arg0
));
7075 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7076 sizeof (long long) == sizeof (long). */
7077 if (TYPE_PRECISION (long_long_integer_type_node
)
7078 == TYPE_PRECISION (long_integer_type_node
))
7080 tree newfn
= NULL_TREE
;
7083 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7084 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LCEIL
);
7087 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7088 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LFLOOR
);
7091 CASE_FLT_FN (BUILT_IN_LLROUND
):
7092 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LROUND
);
7095 CASE_FLT_FN (BUILT_IN_LLRINT
):
7096 newfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_LRINT
);
7105 tree newcall
= build_call_expr(newfn
, 1, arg
);
7106 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), newcall
);
7113 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7114 return type. Return NULL_TREE if no simplification can be made. */
7117 fold_builtin_cabs (tree arg
, tree type
, tree fndecl
)
7121 if (TREE_CODE (TREE_TYPE (arg
)) != COMPLEX_TYPE
7122 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) != REAL_TYPE
)
7125 /* Calculate the result when the argument is a constant. */
7126 if (TREE_CODE (arg
) == COMPLEX_CST
7127 && (res
= do_mpfr_arg2 (TREE_REALPART (arg
), TREE_IMAGPART (arg
),
7131 if (TREE_CODE (arg
) == COMPLEX_EXPR
)
7133 tree real
= TREE_OPERAND (arg
, 0);
7134 tree imag
= TREE_OPERAND (arg
, 1);
7136 /* If either part is zero, cabs is fabs of the other. */
7137 if (real_zerop (real
))
7138 return fold_build1 (ABS_EXPR
, type
, imag
);
7139 if (real_zerop (imag
))
7140 return fold_build1 (ABS_EXPR
, type
, real
);
7142 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7143 if (flag_unsafe_math_optimizations
7144 && operand_equal_p (real
, imag
, OEP_PURE_SAME
))
7146 const REAL_VALUE_TYPE sqrt2_trunc
7147 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
7149 return fold_build2 (MULT_EXPR
, type
,
7150 fold_build1 (ABS_EXPR
, type
, real
),
7151 build_real (type
, sqrt2_trunc
));
7155 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7156 if (TREE_CODE (arg
) == NEGATE_EXPR
7157 || TREE_CODE (arg
) == CONJ_EXPR
)
7158 return build_call_expr (fndecl
, 1, TREE_OPERAND (arg
, 0));
7160 /* Don't do this when optimizing for size. */
7161 if (flag_unsafe_math_optimizations
7162 && optimize
&& !optimize_size
)
7164 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
7166 if (sqrtfn
!= NULL_TREE
)
7168 tree rpart
, ipart
, result
;
7170 arg
= builtin_save_expr (arg
);
7172 rpart
= fold_build1 (REALPART_EXPR
, type
, arg
);
7173 ipart
= fold_build1 (IMAGPART_EXPR
, type
, arg
);
7175 rpart
= builtin_save_expr (rpart
);
7176 ipart
= builtin_save_expr (ipart
);
7178 result
= fold_build2 (PLUS_EXPR
, type
,
7179 fold_build2 (MULT_EXPR
, type
,
7181 fold_build2 (MULT_EXPR
, type
,
7184 return build_call_expr (sqrtfn
, 1, result
);
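/* Illustrative sketch (not part of this file): the rewrites performed by
   fold_builtin_cabs.

     cabs (x + 0i)  ->  fabs (x)                       // always
     cabs (-z), cabs (conj (z))  ->  cabs (z)          // always
     cabs (x + xi)  ->  fabs (x) * sqrt (2)            // unsafe math only
     cabs (z)       ->  sqrt (re*re + im*im)           // unsafe math, and only
                                                       // when not optimizing
                                                       // for size

   The last form trades a library call for a sqrt plus two multiplies.  */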
7191 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7192 Return NULL_TREE if no simplification can be made. */
7195 fold_builtin_sqrt (tree arg
, tree type
)
7198 enum built_in_function fcode
;
7201 if (!validate_arg (arg
, REAL_TYPE
))
7204 /* Calculate the result when the argument is a constant. */
7205 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_sqrt
, &dconst0
, NULL
, true)))
7208 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7209 fcode
= builtin_mathfn_code (arg
);
7210 if (flag_unsafe_math_optimizations
&& BUILTIN_EXPONENT_P (fcode
))
7212 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7213 arg
= fold_build2 (MULT_EXPR
, type
,
7214 CALL_EXPR_ARG (arg
, 0),
7215 build_real (type
, dconsthalf
));
7216 return build_call_expr (expfn
, 1, arg
);
7219 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7220 if (flag_unsafe_math_optimizations
&& BUILTIN_ROOT_P (fcode
))
7222 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7226 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7228 /* The inner root was either sqrt or cbrt. */
7229 REAL_VALUE_TYPE dconstroot
=
7230 BUILTIN_SQRT_P (fcode
) ? dconsthalf
: dconstthird
;
7232 /* Adjust for the outer root. */
7233 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7234 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7235 tree_root
= build_real (type
, dconstroot
);
7236 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7240 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7241 if (flag_unsafe_math_optimizations
7242 && (fcode
== BUILT_IN_POW
7243 || fcode
== BUILT_IN_POWF
7244 || fcode
== BUILT_IN_POWL
))
7246 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7247 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7248 tree arg1
= CALL_EXPR_ARG (arg
, 1);
7250 if (!tree_expr_nonnegative_p (arg0
))
7251 arg0
= build1 (ABS_EXPR
, type
, arg0
);
7252 narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
7253 build_real (type
, dconsthalf
));
7254 return build_call_expr (powfn
, 2, arg0
, narg1
);
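/* Illustrative sketch (not part of this file): the algebraic rewrites done
   above when -funsafe-math-optimizations is in effect.

     sqrt (exp (x))     ->  exp (x * 0.5)
     sqrt (sqrt (x))    ->  pow (x, 0.25)
     sqrt (cbrt (x))    ->  pow (x, 1.0 / 6.0)
     sqrt (pow (x, y))  ->  pow (fabs (x), y * 0.5)

   None of these are performed by default, since they can change rounding,
   errno, and the treatment of negative arguments.  */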
7260 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7261 Return NULL_TREE if no simplification can be made. */
7264 fold_builtin_cbrt (tree arg
, tree type
)
7266 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7269 if (!validate_arg (arg
, REAL_TYPE
))
7272 /* Calculate the result when the argument is a constant. */
7273 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_cbrt
, NULL
, NULL
, 0)))
7276 if (flag_unsafe_math_optimizations
)
7278 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7279 if (BUILTIN_EXPONENT_P (fcode
))
7281 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7282 const REAL_VALUE_TYPE third_trunc
=
7283 real_value_truncate (TYPE_MODE (type
), dconstthird
);
7284 arg
= fold_build2 (MULT_EXPR
, type
,
7285 CALL_EXPR_ARG (arg
, 0),
7286 build_real (type
, third_trunc
));
7287 return build_call_expr (expfn
, 1, arg
);
7290 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7291 if (BUILTIN_SQRT_P (fcode
))
7293 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7297 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7299 REAL_VALUE_TYPE dconstroot
= dconstthird
;
7301 SET_REAL_EXP (&dconstroot
, REAL_EXP (&dconstroot
) - 1);
7302 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7303 tree_root
= build_real (type
, dconstroot
);
7304 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7308 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7309 if (BUILTIN_CBRT_P (fcode
))
7311 tree arg0
= CALL_EXPR_ARG (arg
, 0);
7312 if (tree_expr_nonnegative_p (arg0
))
7314 tree powfn
= mathfn_built_in (type
, BUILT_IN_POW
);
7319 REAL_VALUE_TYPE dconstroot
;
7321 real_arithmetic (&dconstroot
, MULT_EXPR
, &dconstthird
, &dconstthird
);
7322 dconstroot
= real_value_truncate (TYPE_MODE (type
), dconstroot
);
7323 tree_root
= build_real (type
, dconstroot
);
7324 return build_call_expr (powfn
, 2, arg0
, tree_root
);
7329 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7330 if (fcode
== BUILT_IN_POW
7331 || fcode
== BUILT_IN_POWF
7332 || fcode
== BUILT_IN_POWL
)
7334 tree arg00
= CALL_EXPR_ARG (arg
, 0);
7335 tree arg01
= CALL_EXPR_ARG (arg
, 1);
7336 if (tree_expr_nonnegative_p (arg00
))
7338 tree powfn
= TREE_OPERAND (CALL_EXPR_FN (arg
), 0);
7339 const REAL_VALUE_TYPE dconstroot
7340 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
7341 tree narg01
= fold_build2 (MULT_EXPR
, type
, arg01
,
7342 build_real (type
, dconstroot
));
7343 return build_call_expr (powfn
, 2, arg00
, narg01
);
/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
   TYPE is the type of the return value.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_cos (tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
    return res;

  /* Optimize cos(-x) into cos (x).  */
  if ((narg = fold_strip_sign_ops (arg)))
    return build_call_expr (fndecl, 1, narg);

  return NULL_TREE;
}

/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cosh (tree arg, tree type, tree fndecl)
{
  tree res, narg;

  if (validate_arg (arg, REAL_TYPE))
    {
      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
        return res;

      /* Optimize cosh(-x) into cosh (x).  */
      if ((narg = fold_strip_sign_ops (arg)))
        return build_call_expr (fndecl, 1, narg);
    }

  return NULL_TREE;
}
7395 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7396 Return NULL_TREE if no simplification can be made. */
7399 fold_builtin_tan (tree arg
, tree type
)
7401 enum built_in_function fcode
;
7404 if (!validate_arg (arg
, REAL_TYPE
))
7407 /* Calculate the result when the argument is a constant. */
7408 if ((res
= do_mpfr_arg1 (arg
, type
, mpfr_tan
, NULL
, NULL
, 0)))
7411 /* Optimize tan(atan(x)) = x. */
7412 fcode
= builtin_mathfn_code (arg
);
7413 if (flag_unsafe_math_optimizations
7414 && (fcode
== BUILT_IN_ATAN
7415 || fcode
== BUILT_IN_ATANF
7416 || fcode
== BUILT_IN_ATANL
))
7417 return CALL_EXPR_ARG (arg
, 0);
7422 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7423 NULL_TREE if no simplification can be made. */
7426 fold_builtin_sincos (tree arg0
, tree arg1
, tree arg2
)
7431 if (!validate_arg (arg0
, REAL_TYPE
)
7432 || !validate_arg (arg1
, POINTER_TYPE
)
7433 || !validate_arg (arg2
, POINTER_TYPE
))
7436 type
= TREE_TYPE (arg0
);
7438 /* Calculate the result when the argument is a constant. */
7439 if ((res
= do_mpfr_sincos (arg0
, arg1
, arg2
)))
7442 /* Canonicalize sincos to cexpi. */
7443 if (!TARGET_C99_FUNCTIONS
)
7445 fn
= mathfn_built_in (type
, BUILT_IN_CEXPI
);
7449 call
= build_call_expr (fn
, 1, arg0
);
7450 call
= builtin_save_expr (call
);
7452 return build2 (COMPOUND_EXPR
, type
,
7453 build2 (MODIFY_EXPR
, void_type_node
,
7454 build_fold_indirect_ref (arg1
),
7455 build1 (IMAGPART_EXPR
, type
, call
)),
7456 build2 (MODIFY_EXPR
, void_type_node
,
7457 build_fold_indirect_ref (arg2
),
7458 build1 (REALPART_EXPR
, type
, call
)));
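/* Illustrative sketch (not part of this file): when the target provides
   the C99 runtime (TARGET_C99_FUNCTIONS), the call

     sincos (x, &s, &c);

   is rewritten above roughly as

     __complex__ double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   which lets later passes share the combined sine/cosine computation.  */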
7461 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7462 NULL_TREE if no simplification can be made. */
7465 fold_builtin_cexp (tree arg0
, tree type
)
7468 tree realp
, imagp
, ifn
;
7470 if (!validate_arg (arg0
, COMPLEX_TYPE
))
7473 rtype
= TREE_TYPE (TREE_TYPE (arg0
));
7475 /* In case we can figure out the real part of arg0 and it is constant zero
7477 if (!TARGET_C99_FUNCTIONS
)
7479 ifn
= mathfn_built_in (rtype
, BUILT_IN_CEXPI
);
7483 if ((realp
= fold_unary (REALPART_EXPR
, rtype
, arg0
))
7484 && real_zerop (realp
))
7486 tree narg
= fold_build1 (IMAGPART_EXPR
, rtype
, arg0
);
7487 return build_call_expr (ifn
, 1, narg
);
7490 /* In case we can easily decompose real and imaginary parts split cexp
7491 to exp (r) * cexpi (i). */
7492 if (flag_unsafe_math_optimizations
7495 tree rfn
, rcall
, icall
;
7497 rfn
= mathfn_built_in (rtype
, BUILT_IN_EXP
);
7501 imagp
= fold_unary (IMAGPART_EXPR
, rtype
, arg0
);
7505 icall
= build_call_expr (ifn
, 1, imagp
);
7506 icall
= builtin_save_expr (icall
);
7507 rcall
= build_call_expr (rfn
, 1, realp
);
7508 rcall
= builtin_save_expr (rcall
);
7509 return build2 (COMPLEX_EXPR
, type
,
7510 build2 (MULT_EXPR
, rtype
,
7512 build1 (REALPART_EXPR
, rtype
, icall
)),
7513 build2 (MULT_EXPR
, rtype
,
7515 build1 (IMAGPART_EXPR
, rtype
, icall
)));
7521 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7522 Return NULL_TREE if no simplification can be made. */
7525 fold_builtin_trunc (tree fndecl
, tree arg
)
7527 if (!validate_arg (arg
, REAL_TYPE
))
7530 /* Optimize trunc of constant value. */
7531 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7533 REAL_VALUE_TYPE r
, x
;
7534 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7536 x
= TREE_REAL_CST (arg
);
7537 real_trunc (&r
, TYPE_MODE (type
), &x
);
7538 return build_real (type
, r
);
7541 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7544 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7545 Return NULL_TREE if no simplification can be made. */
7548 fold_builtin_floor (tree fndecl
, tree arg
)
7550 if (!validate_arg (arg
, REAL_TYPE
))
7553 /* Optimize floor of constant value. */
7554 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7558 x
= TREE_REAL_CST (arg
);
7559 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7561 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7564 real_floor (&r
, TYPE_MODE (type
), &x
);
7565 return build_real (type
, r
);
7569 /* Fold floor (x) where x is nonnegative to trunc (x). */
7570 if (tree_expr_nonnegative_p (arg
))
7572 tree truncfn
= mathfn_built_in (TREE_TYPE (arg
), BUILT_IN_TRUNC
);
7574 return build_call_expr (truncfn
, 1, arg
);
7577 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7580 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7581 Return NULL_TREE if no simplification can be made. */
7584 fold_builtin_ceil (tree fndecl
, tree arg
)
7586 if (!validate_arg (arg
, REAL_TYPE
))
7589 /* Optimize ceil of constant value. */
7590 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7594 x
= TREE_REAL_CST (arg
);
7595 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7597 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7600 real_ceil (&r
, TYPE_MODE (type
), &x
);
7601 return build_real (type
, r
);
7605 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7608 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7609 Return NULL_TREE if no simplification can be made. */
7612 fold_builtin_round (tree fndecl
, tree arg
)
7614 if (!validate_arg (arg
, REAL_TYPE
))
7617 /* Optimize round of constant value. */
7618 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7622 x
= TREE_REAL_CST (arg
);
7623 if (! REAL_VALUE_ISNAN (x
) || ! flag_errno_math
)
7625 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7628 real_round (&r
, TYPE_MODE (type
), &x
);
7629 return build_real (type
, r
);
7633 return fold_trunc_transparent_mathfn (fndecl
, arg
);
7636 /* Fold function call to builtin lround, lroundf or lroundl (or the
7637 corresponding long long versions) and other rounding functions. ARG
7638 is the argument to the call. Return NULL_TREE if no simplification
7642 fold_builtin_int_roundingfn (tree fndecl
, tree arg
)
7644 if (!validate_arg (arg
, REAL_TYPE
))
7647 /* Optimize lround of constant value. */
7648 if (TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
7650 const REAL_VALUE_TYPE x
= TREE_REAL_CST (arg
);
7652 if (! REAL_VALUE_ISNAN (x
) && ! REAL_VALUE_ISINF (x
))
7654 tree itype
= TREE_TYPE (TREE_TYPE (fndecl
));
7655 tree ftype
= TREE_TYPE (arg
);
7656 unsigned HOST_WIDE_INT lo2
;
7657 HOST_WIDE_INT hi
, lo
;
7660 switch (DECL_FUNCTION_CODE (fndecl
))
7662 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7663 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7664 real_floor (&r
, TYPE_MODE (ftype
), &x
);
7667 CASE_FLT_FN (BUILT_IN_LCEIL
):
7668 CASE_FLT_FN (BUILT_IN_LLCEIL
):
7669 real_ceil (&r
, TYPE_MODE (ftype
), &x
);
7672 CASE_FLT_FN (BUILT_IN_LROUND
):
7673 CASE_FLT_FN (BUILT_IN_LLROUND
):
7674 real_round (&r
, TYPE_MODE (ftype
), &x
);
7681 REAL_VALUE_TO_INT (&lo
, &hi
, r
);
7682 if (!fit_double_type (lo
, hi
, &lo2
, &hi
, itype
))
7683 return build_int_cst_wide (itype
, lo2
, hi
);
7687 switch (DECL_FUNCTION_CODE (fndecl
))
7689 CASE_FLT_FN (BUILT_IN_LFLOOR
):
7690 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
7691 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7692 if (tree_expr_nonnegative_p (arg
))
7693 return fold_build1 (FIX_TRUNC_EXPR
, TREE_TYPE (TREE_TYPE (fndecl
)),
7699 return fold_fixed_mathfn (fndecl
, arg
);
7702 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
7703 and their long and long long variants (i.e. ffsl and ffsll). ARG is
7704 the argument to the call. Return NULL_TREE if no simplification can
7708 fold_builtin_bitop (tree fndecl
, tree arg
)
7710 if (!validate_arg (arg
, INTEGER_TYPE
))
7713 /* Optimize for constant argument. */
7714 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7716 HOST_WIDE_INT hi
, width
, result
;
7717 unsigned HOST_WIDE_INT lo
;
7720 type
= TREE_TYPE (arg
);
7721 width
= TYPE_PRECISION (type
);
7722 lo
= TREE_INT_CST_LOW (arg
);
7724 /* Clear all the bits that are beyond the type's precision. */
7725 if (width
> HOST_BITS_PER_WIDE_INT
)
7727 hi
= TREE_INT_CST_HIGH (arg
);
7728 if (width
< 2 * HOST_BITS_PER_WIDE_INT
)
7729 hi
&= ~((HOST_WIDE_INT
) (-1) >> (width
- HOST_BITS_PER_WIDE_INT
));
7734 if (width
< HOST_BITS_PER_WIDE_INT
)
7735 lo
&= ~((unsigned HOST_WIDE_INT
) (-1) << width
);
7738 switch (DECL_FUNCTION_CODE (fndecl
))
7740 CASE_INT_FN (BUILT_IN_FFS
):
7742 result
= exact_log2 (lo
& -lo
) + 1;
7744 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
) + 1;
7749 CASE_INT_FN (BUILT_IN_CLZ
):
7751 result
= width
- floor_log2 (hi
) - 1 - HOST_BITS_PER_WIDE_INT
;
7753 result
= width
- floor_log2 (lo
) - 1;
7754 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7758 CASE_INT_FN (BUILT_IN_CTZ
):
7760 result
= exact_log2 (lo
& -lo
);
7762 result
= HOST_BITS_PER_WIDE_INT
+ exact_log2 (hi
& -hi
);
7763 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type
), result
))
7767 CASE_INT_FN (BUILT_IN_POPCOUNT
):
7770 result
++, lo
&= lo
- 1;
7772 result
++, hi
&= hi
- 1;
7775 CASE_INT_FN (BUILT_IN_PARITY
):
7778 result
++, lo
&= lo
- 1;
7780 result
++, hi
&= hi
- 1;
7788 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), result
);
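/* Illustrative sketch (not part of this file): with a constant argument the
   bit builtins above fold at compile time.

     __builtin_popcount (0xff)   // -> 8
     __builtin_parity (7)        // -> 1
     __builtin_ffs (0x10)        // -> 5 (1-based index of the least set bit)
     __builtin_clz (1u)          // -> 31 on a target with 32-bit int
     __builtin_ctz (8u)          // -> 3

   clz and ctz of zero are folded only if CLZ/CTZ_DEFINED_VALUE_AT_ZERO
   says the target defines a value for that case.  */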
7794 /* Fold function call to builtin_bswap and the long and long long
7795 variants. Return NULL_TREE if no simplification can be made. */
7797 fold_builtin_bswap (tree fndecl
, tree arg
)
7799 if (! validate_arg (arg
, INTEGER_TYPE
))
7802 /* Optimize constant value. */
7803 if (TREE_CODE (arg
) == INTEGER_CST
&& !TREE_OVERFLOW (arg
))
7805 HOST_WIDE_INT hi
, width
, r_hi
= 0;
7806 unsigned HOST_WIDE_INT lo
, r_lo
= 0;
7809 type
= TREE_TYPE (arg
);
7810 width
= TYPE_PRECISION (type
);
7811 lo
= TREE_INT_CST_LOW (arg
);
7812 hi
= TREE_INT_CST_HIGH (arg
);
7814 switch (DECL_FUNCTION_CODE (fndecl
))
7816 case BUILT_IN_BSWAP32
:
7817 case BUILT_IN_BSWAP64
:
7821 for (s
= 0; s
< width
; s
+= 8)
7823 int d
= width
- s
- 8;
7824 unsigned HOST_WIDE_INT byte
;
7826 if (s
< HOST_BITS_PER_WIDE_INT
)
7827 byte
= (lo
>> s
) & 0xff;
7829 byte
= (hi
>> (s
- HOST_BITS_PER_WIDE_INT
)) & 0xff;
7831 if (d
< HOST_BITS_PER_WIDE_INT
)
7834 r_hi
|= byte
<< (d
- HOST_BITS_PER_WIDE_INT
);
7844 if (width
< HOST_BITS_PER_WIDE_INT
)
7845 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
);
7847 return build_int_cst_wide (TREE_TYPE (TREE_TYPE (fndecl
)), r_lo
, r_hi
);
/* Return true if EXPR is the real constant contained in VALUE.  */

static bool
real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
{
  STRIP_NOPS (expr);

  return ((TREE_CODE (expr) == REAL_CST
           && !TREE_OVERFLOW (expr)
           && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
          || (TREE_CODE (expr) == COMPLEX_CST
              && real_dconstp (TREE_REALPART (expr), value)
              && real_zerop (TREE_IMAGPART (expr))));
}
7868 /* A subroutine of fold_builtin to fold the various logarithmic
7869 functions. Return NULL_TREE if no simplification can me made.
7870 FUNC is the corresponding MPFR logarithm function. */
7873 fold_builtin_logarithm (tree fndecl
, tree arg
,
7874 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
7876 if (validate_arg (arg
, REAL_TYPE
))
7878 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
7880 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
7882 /* Optimize log(e) = 1.0. We're never passed an exact 'e',
7883 instead we'll look for 'e' truncated to MODE. So only do
7884 this if flag_unsafe_math_optimizations is set. */
7885 if (flag_unsafe_math_optimizations
&& func
== mpfr_log
)
7887 const REAL_VALUE_TYPE e_truncated
=
7888 real_value_truncate (TYPE_MODE (type
), dconste
);
7889 if (real_dconstp (arg
, &e_truncated
))
7890 return build_real (type
, dconst1
);
7893 /* Calculate the result when the argument is a constant. */
7894 if ((res
= do_mpfr_arg1 (arg
, type
, func
, &dconst0
, NULL
, false)))
7897 /* Special case, optimize logN(expN(x)) = x. */
7898 if (flag_unsafe_math_optimizations
7899 && ((func
== mpfr_log
7900 && (fcode
== BUILT_IN_EXP
7901 || fcode
== BUILT_IN_EXPF
7902 || fcode
== BUILT_IN_EXPL
))
7903 || (func
== mpfr_log2
7904 && (fcode
== BUILT_IN_EXP2
7905 || fcode
== BUILT_IN_EXP2F
7906 || fcode
== BUILT_IN_EXP2L
))
7907 || (func
== mpfr_log10
&& (BUILTIN_EXP10_P (fcode
)))))
7908 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
7910 /* Optimize logN(func()) for various exponential functions. We
7911 want to determine the value "x" and the power "exponent" in
7912 order to transform logN(x**exponent) into exponent*logN(x). */
7913 if (flag_unsafe_math_optimizations
)
7915 tree exponent
= 0, x
= 0;
7919 CASE_FLT_FN (BUILT_IN_EXP
):
7920 /* Prepare to do logN(exp(exponent) -> exponent*logN(e). */
7921 x
= build_real (type
,
7922 real_value_truncate (TYPE_MODE (type
), dconste
));
7923 exponent
= CALL_EXPR_ARG (arg
, 0);
7925 CASE_FLT_FN (BUILT_IN_EXP2
):
7926 /* Prepare to do logN(exp2(exponent) -> exponent*logN(2). */
7927 x
= build_real (type
, dconst2
);
7928 exponent
= CALL_EXPR_ARG (arg
, 0);
7930 CASE_FLT_FN (BUILT_IN_EXP10
):
7931 CASE_FLT_FN (BUILT_IN_POW10
):
7932 /* Prepare to do logN(exp10(exponent) -> exponent*logN(10). */
7933 x
= build_real (type
, dconst10
);
7934 exponent
= CALL_EXPR_ARG (arg
, 0);
7936 CASE_FLT_FN (BUILT_IN_SQRT
):
7937 /* Prepare to do logN(sqrt(x) -> 0.5*logN(x). */
7938 x
= CALL_EXPR_ARG (arg
, 0);
7939 exponent
= build_real (type
, dconsthalf
);
7941 CASE_FLT_FN (BUILT_IN_CBRT
):
7942 /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x). */
7943 x
= CALL_EXPR_ARG (arg
, 0);
7944 exponent
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
7947 CASE_FLT_FN (BUILT_IN_POW
):
7948 /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x). */
7949 x
= CALL_EXPR_ARG (arg
, 0);
7950 exponent
= CALL_EXPR_ARG (arg
, 1);
7956 /* Now perform the optimization. */
7959 tree logfn
= build_call_expr (fndecl
, 1, x
);
7960 return fold_build2 (MULT_EXPR
, type
, exponent
, logfn
);
7968 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7969 NULL_TREE if no simplification can be made. */
7972 fold_builtin_hypot (tree fndecl
, tree arg0
, tree arg1
, tree type
)
7974 tree res
, narg0
, narg1
;
7976 if (!validate_arg (arg0
, REAL_TYPE
)
7977 || !validate_arg (arg1
, REAL_TYPE
))
7980 /* Calculate the result when the argument is a constant. */
7981 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_hypot
)))
7984 /* If either argument to hypot has a negate or abs, strip that off.
7985 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
7986 narg0
= fold_strip_sign_ops (arg0
);
7987 narg1
= fold_strip_sign_ops (arg1
);
7990 return build_call_expr (fndecl
, 2, narg0
? narg0
: arg0
,
7991 narg1
? narg1
: arg1
);
7994 /* If either argument is zero, hypot is fabs of the other. */
7995 if (real_zerop (arg0
))
7996 return fold_build1 (ABS_EXPR
, type
, arg1
);
7997 else if (real_zerop (arg1
))
7998 return fold_build1 (ABS_EXPR
, type
, arg0
);
8000 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8001 if (flag_unsafe_math_optimizations
8002 && operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8004 const REAL_VALUE_TYPE sqrt2_trunc
8005 = real_value_truncate (TYPE_MODE (type
), dconstsqrt2
);
8006 return fold_build2 (MULT_EXPR
, type
,
8007 fold_build1 (ABS_EXPR
, type
, arg0
),
8008 build_real (type
, sqrt2_trunc
));
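/* Illustrative sketch (not part of this file): the simplifications
   performed by fold_builtin_hypot.

     hypot (-x, y)   ->  hypot (x, y)            // sign ops stripped, always
     hypot (x, 0.0)  ->  fabs (x)                // always
     hypot (x, x)    ->  fabs (x) * sqrt (2)     // unsafe math only

   hypot is specified to compute sqrt (x*x + y*y) without undue overflow,
   so only rewrites that preserve that guarantee are done
   unconditionally.  */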
8015 /* Fold a builtin function call to pow, powf, or powl. Return
8016 NULL_TREE if no simplification can be made. */
8018 fold_builtin_pow (tree fndecl
, tree arg0
, tree arg1
, tree type
)
8022 if (!validate_arg (arg0
, REAL_TYPE
)
8023 || !validate_arg (arg1
, REAL_TYPE
))
8026 /* Calculate the result when the argument is a constant. */
8027 if ((res
= do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_pow
)))
8030 /* Optimize pow(1.0,y) = 1.0. */
8031 if (real_onep (arg0
))
8032 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8034 if (TREE_CODE (arg1
) == REAL_CST
8035 && !TREE_OVERFLOW (arg1
))
8037 REAL_VALUE_TYPE cint
;
8041 c
= TREE_REAL_CST (arg1
);
8043 /* Optimize pow(x,0.0) = 1.0. */
8044 if (REAL_VALUES_EQUAL (c
, dconst0
))
8045 return omit_one_operand (type
, build_real (type
, dconst1
),
8048 /* Optimize pow(x,1.0) = x. */
8049 if (REAL_VALUES_EQUAL (c
, dconst1
))
8052 /* Optimize pow(x,-1.0) = 1.0/x. */
8053 if (REAL_VALUES_EQUAL (c
, dconstm1
))
8054 return fold_build2 (RDIV_EXPR
, type
,
8055 build_real (type
, dconst1
), arg0
);
8057 /* Optimize pow(x,0.5) = sqrt(x). */
8058 if (flag_unsafe_math_optimizations
8059 && REAL_VALUES_EQUAL (c
, dconsthalf
))
8061 tree sqrtfn
= mathfn_built_in (type
, BUILT_IN_SQRT
);
8063 if (sqrtfn
!= NULL_TREE
)
8064 return build_call_expr (sqrtfn
, 1, arg0
);
8067 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8068 if (flag_unsafe_math_optimizations
)
8070 const REAL_VALUE_TYPE dconstroot
8071 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
8073 if (REAL_VALUES_EQUAL (c
, dconstroot
))
8075 tree cbrtfn
= mathfn_built_in (type
, BUILT_IN_CBRT
);
8076 if (cbrtfn
!= NULL_TREE
)
8077 return build_call_expr (cbrtfn
, 1, arg0
);
8081 /* Check for an integer exponent. */
8082 n
= real_to_integer (&c
);
8083 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
8084 if (real_identical (&c
, &cint
))
8086 /* Attempt to evaluate pow at compile-time. */
8087 if (TREE_CODE (arg0
) == REAL_CST
8088 && !TREE_OVERFLOW (arg0
))
8093 x
= TREE_REAL_CST (arg0
);
8094 inexact
= real_powi (&x
, TYPE_MODE (type
), &x
, n
);
8095 if (flag_unsafe_math_optimizations
|| !inexact
)
8096 return build_real (type
, x
);
8099 /* Strip sign ops from even integer powers. */
8100 if ((n
& 1) == 0 && flag_unsafe_math_optimizations
)
8102 tree narg0
= fold_strip_sign_ops (arg0
);
8104 return build_call_expr (fndecl
, 2, narg0
, arg1
);
8109 if (flag_unsafe_math_optimizations
)
8111 const enum built_in_function fcode
= builtin_mathfn_code (arg0
);
8113 /* Optimize pow(expN(x),y) = expN(x*y). */
8114 if (BUILTIN_EXPONENT_P (fcode
))
8116 tree expfn
= TREE_OPERAND (CALL_EXPR_FN (arg0
), 0);
8117 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8118 arg
= fold_build2 (MULT_EXPR
, type
, arg
, arg1
);
8119 return build_call_expr (expfn
, 1, arg
);
8122 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8123 if (BUILTIN_SQRT_P (fcode
))
8125 tree narg0
= CALL_EXPR_ARG (arg0
, 0);
8126 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8127 build_real (type
, dconsthalf
));
8128 return build_call_expr (fndecl
, 2, narg0
, narg1
);
8131 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8132 if (BUILTIN_CBRT_P (fcode
))
8134 tree arg
= CALL_EXPR_ARG (arg0
, 0);
8135 if (tree_expr_nonnegative_p (arg
))
8137 const REAL_VALUE_TYPE dconstroot
8138 = real_value_truncate (TYPE_MODE (type
), dconstthird
);
8139 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg1
,
8140 build_real (type
, dconstroot
));
8141 return build_call_expr (fndecl
, 2, arg
, narg1
);
8145 /* Optimize pow(pow(x,y),z) = pow(x,y*z). */
8146 if (fcode
== BUILT_IN_POW
8147 || fcode
== BUILT_IN_POWF
8148 || fcode
== BUILT_IN_POWL
)
8150 tree arg00
= CALL_EXPR_ARG (arg0
, 0);
8151 tree arg01
= CALL_EXPR_ARG (arg0
, 1);
8152 tree narg1
= fold_build2 (MULT_EXPR
, type
, arg01
, arg1
);
8153 return build_call_expr (fndecl
, 2, arg00
, narg1
);
8160 /* Fold a builtin function call to powi, powif, or powil with argument ARG.
8161 Return NULL_TREE if no simplification can be made. */
8163 fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED
,
8164 tree arg0
, tree arg1
, tree type
)
8166 if (!validate_arg (arg0
, REAL_TYPE
)
8167 || !validate_arg (arg1
, INTEGER_TYPE
))
8170 /* Optimize pow(1.0,y) = 1.0. */
8171 if (real_onep (arg0
))
8172 return omit_one_operand (type
, build_real (type
, dconst1
), arg1
);
8174 if (host_integerp (arg1
, 0))
8176 HOST_WIDE_INT c
= TREE_INT_CST_LOW (arg1
);
8178 /* Evaluate powi at compile-time. */
8179 if (TREE_CODE (arg0
) == REAL_CST
8180 && !TREE_OVERFLOW (arg0
))
8183 x
= TREE_REAL_CST (arg0
);
8184 real_powi (&x
, TYPE_MODE (type
), &x
, c
);
8185 return build_real (type
, x
);
8188 /* Optimize pow(x,0) = 1.0. */
8190 return omit_one_operand (type
, build_real (type
, dconst1
),
8193 /* Optimize pow(x,1) = x. */
8197 /* Optimize pow(x,-1) = 1.0/x. */
8199 return fold_build2 (RDIV_EXPR
, type
,
8200 build_real (type
, dconst1
), arg0
);
8206 /* A subroutine of fold_builtin to fold the various exponent
8207 functions. Return NULL_TREE if no simplification can be made.
8208 FUNC is the corresponding MPFR exponent function. */
8211 fold_builtin_exponent (tree fndecl
, tree arg
,
8212 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
))
8214 if (validate_arg (arg
, REAL_TYPE
))
8216 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
8219 /* Calculate the result when the argument is a constant. */
8220 if ((res
= do_mpfr_arg1 (arg
, type
, func
, NULL
, NULL
, 0)))
8223 /* Optimize expN(logN(x)) = x. */
8224 if (flag_unsafe_math_optimizations
)
8226 const enum built_in_function fcode
= builtin_mathfn_code (arg
);
8228 if ((func
== mpfr_exp
8229 && (fcode
== BUILT_IN_LOG
8230 || fcode
== BUILT_IN_LOGF
8231 || fcode
== BUILT_IN_LOGL
))
8232 || (func
== mpfr_exp2
8233 && (fcode
== BUILT_IN_LOG2
8234 || fcode
== BUILT_IN_LOG2F
8235 || fcode
== BUILT_IN_LOG2L
))
8236 || (func
== mpfr_exp10
8237 && (fcode
== BUILT_IN_LOG10
8238 || fcode
== BUILT_IN_LOG10F
8239 || fcode
== BUILT_IN_LOG10L
)))
8240 return fold_convert (type
, CALL_EXPR_ARG (arg
, 0));
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
8258 /* Fold function call to builtin memset. Return
8259 NULL_TREE if no simplification can be made. */
8262 fold_builtin_memset (tree dest
, tree c
, tree len
, tree type
, bool ignore
)
8265 unsigned HOST_WIDE_INT length
, cval
;
8267 if (! validate_arg (dest
, POINTER_TYPE
)
8268 || ! validate_arg (c
, INTEGER_TYPE
)
8269 || ! validate_arg (len
, INTEGER_TYPE
))
8272 if (! host_integerp (len
, 1))
8275 /* If the LEN parameter is zero, return DEST. */
8276 if (integer_zerop (len
))
8277 return omit_one_operand (type
, dest
, c
);
8279 if (! host_integerp (c
, 1) || TREE_SIDE_EFFECTS (dest
))
8284 if (TREE_CODE (var
) != ADDR_EXPR
)
8287 var
= TREE_OPERAND (var
, 0);
8288 if (TREE_THIS_VOLATILE (var
))
8291 if (!INTEGRAL_TYPE_P (TREE_TYPE (var
))
8292 && !POINTER_TYPE_P (TREE_TYPE (var
)))
8295 if (! var_decl_component_p (var
))
8298 length
= tree_low_cst (len
, 1);
8299 if (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (var
))) != length
8300 || get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
8304 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
8307 if (integer_zerop (c
))
8311 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
8314 cval
= tree_low_cst (c
, 1);
8318 cval
|= (cval
<< 31) << 1;
8321 ret
= build_int_cst_type (TREE_TYPE (var
), cval
);
8322 ret
= build2 (MODIFY_EXPR
, TREE_TYPE (var
), var
, ret
);
8326 return omit_one_operand (type
, dest
, ret
);
/* Fold function call to builtin bzero.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bzero (tree dest, tree size, bool ignore)
{
  if (! validate_arg (dest, POINTER_TYPE)
      || ! validate_arg (size, INTEGER_TYPE))
    return NULL_TREE;

  if (! ignore)
    return NULL_TREE;

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  return fold_builtin_memset (dest, integer_zero_node,
			      fold_convert (sizetype, size),
			      void_type_node, ignore);
}
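
/* Illustrative sketch, not part of the original file: the rewrite performed
   above, expressed directly in C.  The wrapper name is hypothetical and the
   block is not compiled.  */
#if 0
#include <stddef.h>
#include <string.h>

static void
folded_bzero (void *p, size_t n)
{
  /* bzero (p, n) is folded into a memset call with a zero fill value.  */
  memset (p, 0, n);
}
#endif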
8352 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8353 NULL_TREE if no simplification can be made.
8354 If ENDP is 0, return DEST (like memcpy).
8355 If ENDP is 1, return DEST+LEN (like mempcpy).
8356 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8357 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8361 fold_builtin_memory_op (tree dest
, tree src
, tree len
, tree type
, bool ignore
, int endp
)
8363 tree destvar
, srcvar
, expr
;
8365 if (! validate_arg (dest
, POINTER_TYPE
)
8366 || ! validate_arg (src
, POINTER_TYPE
)
8367 || ! validate_arg (len
, INTEGER_TYPE
))
8370 /* If the LEN parameter is zero, return DEST. */
8371 if (integer_zerop (len
))
8372 return omit_one_operand (type
, dest
, src
);
8374 /* If SRC and DEST are the same (and not volatile), return
8375 DEST{,+LEN,+LEN-1}. */
8376 if (operand_equal_p (src
, dest
, 0))
8380 tree srctype
, desttype
;
8383 int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
8384 int dest_align
= get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
      /* Both DEST and SRC must be pointer types.
	 ??? This is what old code did.  Is the testing for pointer types
	 really mandatory?

	 If either SRC is readonly or length is 1, we can use memcpy.  */
8391 if (dest_align
&& src_align
8392 && (readonly_data_expr (src
)
8393 || (host_integerp (len
, 1)
8394 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
>=
8395 tree_low_cst (len
, 1)))))
8397 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8400 return build_call_expr (fn
, 3, dest
, src
, len
);
8405 if (!host_integerp (len
, 0))
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts up to the VOID return value from malloc.
	 Perhaps we ought to inherit the type from the non-VOID argument here?  */
8413 srctype
= TREE_TYPE (TREE_TYPE (src
));
8414 desttype
= TREE_TYPE (TREE_TYPE (dest
));
8415 if (!srctype
|| !desttype
8416 || !TYPE_SIZE_UNIT (srctype
)
8417 || !TYPE_SIZE_UNIT (desttype
)
8418 || TREE_CODE (TYPE_SIZE_UNIT (srctype
)) != INTEGER_CST
8419 || TREE_CODE (TYPE_SIZE_UNIT (desttype
)) != INTEGER_CST
8420 || !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
8421 || !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
8424 if (get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
)
8425 < (int) TYPE_ALIGN (desttype
)
8426 || (get_pointer_alignment (src
, BIGGEST_ALIGNMENT
)
8427 < (int) TYPE_ALIGN (srctype
)))
8431 dest
= builtin_save_expr (dest
);
8433 srcvar
= build_fold_indirect_ref (src
);
8434 if (TREE_THIS_VOLATILE (srcvar
))
8436 if (!tree_int_cst_equal (lang_hooks
.expr_size (srcvar
), len
))
      /* With memcpy, it is possible to bypass aliasing rules, so without
	 this check, e.g. execute/20060930-2.c would be misoptimized, because
	 it uses a conflicting alias set to hold the argument for the memcpy
	 call.  This check is probably unnecessary with -fno-strict-aliasing.
	 Similarly for destvar.  See also PR29286.  */
8443 if (!var_decl_component_p (srcvar
)
	  /* Accept: memcpy (*char_var, "test", 1); that simplifies
	     to char_var='t';  */
8446 || is_gimple_min_invariant (srcvar
)
8447 || readonly_data_expr (src
))
8450 destvar
= build_fold_indirect_ref (dest
);
8451 if (TREE_THIS_VOLATILE (destvar
))
8453 if (!tree_int_cst_equal (lang_hooks
.expr_size (destvar
), len
))
8455 if (!var_decl_component_p (destvar
))
8458 if (srctype
== desttype
8459 || (gimple_in_ssa_p (cfun
)
8460 && tree_ssa_useless_type_conversion_1 (desttype
, srctype
)))
8462 else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar
))
8463 || POINTER_TYPE_P (TREE_TYPE (srcvar
)))
8464 && (INTEGRAL_TYPE_P (TREE_TYPE (destvar
))
8465 || POINTER_TYPE_P (TREE_TYPE (destvar
))))
8466 expr
= fold_convert (TREE_TYPE (destvar
), srcvar
);
8468 expr
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (destvar
), srcvar
);
8469 expr
= build2 (MODIFY_EXPR
, TREE_TYPE (destvar
), destvar
, expr
);
8475 if (endp
== 0 || endp
== 3)
8476 return omit_one_operand (type
, dest
, expr
);
8482 len
= fold_build2 (MINUS_EXPR
, TREE_TYPE (len
), len
,
8485 len
= fold_convert (TREE_TYPE (dest
), len
);
8486 dest
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
8487 dest
= fold_convert (type
, dest
);
8489 dest
= omit_one_operand (type
, dest
, expr
);
8493 /* Fold function call to builtin strcpy with arguments DEST and SRC.
8494 If LEN is not NULL, it represents the length of the string to be
8495 copied. Return NULL_TREE if no simplification can be made. */
8498 fold_builtin_strcpy (tree fndecl
, tree dest
, tree src
, tree len
)
8502 if (!validate_arg (dest
, POINTER_TYPE
)
8503 || !validate_arg (src
, POINTER_TYPE
))
8506 /* If SRC and DEST are the same (and not volatile), return DEST. */
8507 if (operand_equal_p (src
, dest
, 0))
8508 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
8513 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8519 len
= c_strlen (src
, 1);
8520 if (! len
|| TREE_SIDE_EFFECTS (len
))
8524 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
8525 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8526 build_call_expr (fn
, 3, dest
, src
, len
));
8529 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
8530 If SLEN is not NULL, it represents the length of the source string.
8531 Return NULL_TREE if no simplification can be made. */
8534 fold_builtin_strncpy (tree fndecl
, tree dest
, tree src
, tree len
, tree slen
)
8538 if (!validate_arg (dest
, POINTER_TYPE
)
8539 || !validate_arg (src
, POINTER_TYPE
)
8540 || !validate_arg (len
, INTEGER_TYPE
))
8543 /* If the LEN parameter is zero, return DEST. */
8544 if (integer_zerop (len
))
8545 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
  /* We can't compare slen with len as constants below if len is not a
     constant.  */
8549 if (len
== 0 || TREE_CODE (len
) != INTEGER_CST
)
8553 slen
= c_strlen (src
, 1);
8555 /* Now, we must be passed a constant src ptr parameter. */
8556 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
8559 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
8561 /* We do not support simplification of this case, though we do
8562 support it when expanding trees into RTL. */
8563 /* FIXME: generate a call to __builtin_memset. */
8564 if (tree_int_cst_lt (slen
, len
))
8567 /* OK transform into builtin memcpy. */
8568 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
8571 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
8572 build_call_expr (fn
, 3, dest
, src
, len
));
8575 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8576 Return NULL_TREE if no simplification can be made. */
8579 fold_builtin_memcmp (tree arg1
, tree arg2
, tree len
)
8581 const char *p1
, *p2
;
8583 if (!validate_arg (arg1
, POINTER_TYPE
)
8584 || !validate_arg (arg2
, POINTER_TYPE
)
8585 || !validate_arg (len
, INTEGER_TYPE
))
8588 /* If the LEN parameter is zero, return zero. */
8589 if (integer_zerop (len
))
8590 return omit_two_operands (integer_type_node
, integer_zero_node
,
8593 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8594 if (operand_equal_p (arg1
, arg2
, 0))
8595 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
8597 p1
= c_getstr (arg1
);
8598 p2
= c_getstr (arg2
);
8600 /* If all arguments are constant, and the value of len is not greater
8601 than the lengths of arg1 and arg2, evaluate at compile-time. */
8602 if (host_integerp (len
, 1) && p1
&& p2
8603 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
8604 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
8606 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
8609 return integer_one_node
;
8611 return integer_minus_one_node
;
8613 return integer_zero_node
;
  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8618 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8620 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8621 tree cst_uchar_ptr_node
8622 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8624 tree ind1
= fold_convert (integer_type_node
,
8625 build1 (INDIRECT_REF
, cst_uchar_node
,
8626 fold_convert (cst_uchar_ptr_node
,
8628 tree ind2
= fold_convert (integer_type_node
,
8629 build1 (INDIRECT_REF
, cst_uchar_node
,
8630 fold_convert (cst_uchar_ptr_node
,
8632 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
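
/* Illustrative sketch, not part of the original file: the length-one case
   handled above, expressed directly in C.  The helper name is hypothetical
   and the block is not compiled.  */
#if 0
static int
folded_memcmp_1 (const void *a, const void *b)
{
  /* memcmp (a, b, 1) reduces to the difference of the first bytes.  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
#endif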
8638 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8639 Return NULL_TREE if no simplification can be made. */
8642 fold_builtin_strcmp (tree arg1
, tree arg2
)
8644 const char *p1
, *p2
;
8646 if (!validate_arg (arg1
, POINTER_TYPE
)
8647 || !validate_arg (arg2
, POINTER_TYPE
))
8650 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8651 if (operand_equal_p (arg1
, arg2
, 0))
8652 return integer_zero_node
;
8654 p1
= c_getstr (arg1
);
8655 p2
= c_getstr (arg2
);
8659 const int i
= strcmp (p1
, p2
);
8661 return integer_minus_one_node
;
8663 return integer_one_node
;
8665 return integer_zero_node
;
8668 /* If the second arg is "", return *(const unsigned char*)arg1. */
8669 if (p2
&& *p2
== '\0')
8671 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8672 tree cst_uchar_ptr_node
8673 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8675 return fold_convert (integer_type_node
,
8676 build1 (INDIRECT_REF
, cst_uchar_node
,
8677 fold_convert (cst_uchar_ptr_node
,
8681 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8682 if (p1
&& *p1
== '\0')
8684 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8685 tree cst_uchar_ptr_node
8686 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8688 tree temp
= fold_convert (integer_type_node
,
8689 build1 (INDIRECT_REF
, cst_uchar_node
,
8690 fold_convert (cst_uchar_ptr_node
,
8692 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
8698 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8699 Return NULL_TREE if no simplification can be made. */
8702 fold_builtin_strncmp (tree arg1
, tree arg2
, tree len
)
8704 const char *p1
, *p2
;
8706 if (!validate_arg (arg1
, POINTER_TYPE
)
8707 || !validate_arg (arg2
, POINTER_TYPE
)
8708 || !validate_arg (len
, INTEGER_TYPE
))
8711 /* If the LEN parameter is zero, return zero. */
8712 if (integer_zerop (len
))
8713 return omit_two_operands (integer_type_node
, integer_zero_node
,
8716 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8717 if (operand_equal_p (arg1
, arg2
, 0))
8718 return omit_one_operand (integer_type_node
, integer_zero_node
, len
);
8720 p1
= c_getstr (arg1
);
8721 p2
= c_getstr (arg2
);
8723 if (host_integerp (len
, 1) && p1
&& p2
)
8725 const int i
= strncmp (p1
, p2
, tree_low_cst (len
, 1));
8727 return integer_one_node
;
8729 return integer_minus_one_node
;
8731 return integer_zero_node
;
8734 /* If the second arg is "", and the length is greater than zero,
8735 return *(const unsigned char*)arg1. */
8736 if (p2
&& *p2
== '\0'
8737 && TREE_CODE (len
) == INTEGER_CST
8738 && tree_int_cst_sgn (len
) == 1)
8740 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8741 tree cst_uchar_ptr_node
8742 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8744 return fold_convert (integer_type_node
,
8745 build1 (INDIRECT_REF
, cst_uchar_node
,
8746 fold_convert (cst_uchar_ptr_node
,
8750 /* If the first arg is "", and the length is greater than zero,
8751 return -*(const unsigned char*)arg2. */
8752 if (p1
&& *p1
== '\0'
8753 && TREE_CODE (len
) == INTEGER_CST
8754 && tree_int_cst_sgn (len
) == 1)
8756 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8757 tree cst_uchar_ptr_node
8758 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8760 tree temp
= fold_convert (integer_type_node
,
8761 build1 (INDIRECT_REF
, cst_uchar_node
,
8762 fold_convert (cst_uchar_ptr_node
,
8764 return fold_build1 (NEGATE_EXPR
, integer_type_node
, temp
);
  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8769 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
8771 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
8772 tree cst_uchar_ptr_node
8773 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
8775 tree ind1
= fold_convert (integer_type_node
,
8776 build1 (INDIRECT_REF
, cst_uchar_node
,
8777 fold_convert (cst_uchar_ptr_node
,
8779 tree ind2
= fold_convert (integer_type_node
,
8780 build1 (INDIRECT_REF
, cst_uchar_node
,
8781 fold_convert (cst_uchar_ptr_node
,
8783 return fold_build2 (MINUS_EXPR
, integer_type_node
, ind1
, ind2
);
/* Fold function call to builtin signbit, signbitf or signbitl with argument
   ARG.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (tree arg, tree type)
{
  tree temp;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;

      c = TREE_REAL_CST (arg);
      temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
      return fold_convert (type, temp);
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand (type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold_build2 (LT_EXPR, type, arg,
			build_real (TREE_TYPE (arg), dconst0));

  return NULL_TREE;
}
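
/* For example, signbit (-2.5) folds to 1 at compile time, and for a format
   without signed zeros signbit (x) folds to the comparison x < 0.0.  */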
/* Fold function call to builtin copysign, copysignf or copysignl with
   arguments ARG1 and ARG2.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_copysign (tree fndecl, tree arg1, tree arg2, tree type)
{
  tree tem;

  if (!validate_arg (arg1, REAL_TYPE)
      || !validate_arg (arg2, REAL_TYPE))
    return NULL_TREE;

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert (type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_OVERFLOW (arg1)
      && !TREE_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      /* c1.sign := c2.sign.  */
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand (type,
			     fold_build1 (ABS_EXPR, type, arg1),
			     arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    return build_call_expr (fndecl, 2, tem, arg2);

  return NULL_TREE;
}
/* Fold a call to builtin isascii with argument ARG.  */

static tree
fold_builtin_isascii (tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
		    build_int_cst (NULL_TREE,
				   ~ (unsigned HOST_WIDE_INT) 0x7f));
      return fold_build2 (EQ_EXPR, integer_type_node,
			  arg, integer_zero_node);
    }
}
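
/* Illustrative sketch, not part of the original file: the isascii rewrite
   above, expressed directly in C.  The helper name is hypothetical and the
   block is not compiled.  */
#if 0
static int
folded_isascii (int c)
{
  /* isascii (c) becomes a test that no bits above the low seven are set.  */
  return (c & ~0x7f) == 0;
}
#endif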
/* Fold a call to builtin toascii with argument ARG.  */

static tree
fold_builtin_toascii (tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Transform toascii(c) -> (c & 0x7f).  */
  return fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
		      build_int_cst (NULL_TREE, 0x7f));
}
/* Fold a call to builtin isdigit with argument ARG.  */

static tree
fold_builtin_isdigit (tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
	 However, it definitely is affected by the target character set.  */
      unsigned HOST_WIDE_INT target_digit0
	= lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
	return NULL_TREE;

      arg = fold_convert (unsigned_type_node, arg);
      arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
		    build_int_cst (unsigned_type_node, target_digit0));
      return fold_build2 (LE_EXPR, integer_type_node, arg,
			  build_int_cst (unsigned_type_node, 9));
    }
}
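
/* Illustrative sketch, not part of the original file: the isdigit rewrite
   above, expressed directly in C for a target whose '0' matches the host's.
   The helper name is hypothetical and the block is not compiled.  */
#if 0
static int
folded_isdigit (int c)
{
  /* The unsigned subtraction folds the c >= '0' and c <= '9' tests into
     a single comparison.  */
  return (unsigned) c - '0' <= 9;
}
#endif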
/* Fold a call to fabs, fabsf or fabsl with argument ARG.  */

static tree
fold_builtin_fabs (tree arg, tree type)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  arg = fold_convert (type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold_build1 (ABS_EXPR, type, arg);
}
/* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */

static tree
fold_builtin_abs (tree arg, tree type)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  arg = fold_convert (type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold_build1 (ABS_EXPR, type, arg);
}
8955 /* Fold a call to builtin fmin or fmax. */
8958 fold_builtin_fmin_fmax (tree arg0
, tree arg1
, tree type
, bool max
)
8960 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, REAL_TYPE
))
8962 /* Calculate the result when the argument is a constant. */
8963 tree res
= do_mpfr_arg2 (arg0
, arg1
, type
, (max
? mpfr_max
: mpfr_min
));
8968 /* If either argument is NaN, return the other one. Avoid the
8969 transformation if we get (and honor) a signalling NaN. Using
8970 omit_one_operand() ensures we create a non-lvalue. */
8971 if (TREE_CODE (arg0
) == REAL_CST
8972 && real_isnan (&TREE_REAL_CST (arg0
))
8973 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0
)))
8974 || ! TREE_REAL_CST (arg0
).signalling
))
8975 return omit_one_operand (type
, arg1
, arg0
);
8976 if (TREE_CODE (arg1
) == REAL_CST
8977 && real_isnan (&TREE_REAL_CST (arg1
))
8978 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1
)))
8979 || ! TREE_REAL_CST (arg1
).signalling
))
8980 return omit_one_operand (type
, arg0
, arg1
);
8982 /* Transform fmin/fmax(x,x) -> x. */
8983 if (operand_equal_p (arg0
, arg1
, OEP_PURE_SAME
))
8984 return omit_one_operand (type
, arg0
, arg1
);
8986 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8987 functions to return the numeric arg if the other one is NaN.
8988 These tree codes don't honor that, so only transform if
8989 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8990 handled, so we don't have to worry about it either. */
8991 if (flag_finite_math_only
)
8992 return fold_build2 ((max
? MAX_EXPR
: MIN_EXPR
), type
,
8993 fold_convert (type
, arg0
),
8994 fold_convert (type
, arg1
));
/* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */

static tree
fold_builtin_carg (tree arg, tree type)
{
  if (validate_arg (arg, COMPLEX_TYPE))
    {
      tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);

      if (atan2_fn)
	{
	  tree new_arg = builtin_save_expr (arg);
	  tree r_arg = fold_build1 (REALPART_EXPR, type, new_arg);
	  tree i_arg = fold_build1 (IMAGPART_EXPR, type, new_arg);
	  return build_call_expr (atan2_fn, 2, i_arg, r_arg);
	}
    }

  return NULL_TREE;
}
9020 /* Fold a call to builtin logb/ilogb. */
9023 fold_builtin_logb (tree arg
, tree rettype
)
9025 if (! validate_arg (arg
, REAL_TYPE
))
9030 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9032 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9038 /* If arg is Inf or NaN and we're logb, return it. */
9039 if (TREE_CODE (rettype
) == REAL_TYPE
)
9040 return fold_convert (rettype
, arg
);
9041 /* Fall through... */
9043 /* Zero may set errno and/or raise an exception for logb, also
9044 for ilogb we don't know FP_ILOGB0. */
9047 /* For normal numbers, proceed iff radix == 2. In GCC,
9048 normalized significands are in the range [0.5, 1.0). We
9049 want the exponent as if they were [1.0, 2.0) so get the
9050 exponent and subtract 1. */
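      /* For example, 8.0 is stored internally as 0.5 * 2**4, so REAL_EXP
	 is 4 and logb (8.0) folds to 4 - 1 = 3.  */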
9051 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9052 return fold_convert (rettype
, build_int_cst (NULL_TREE
,
9053 REAL_EXP (value
)-1));
9061 /* Fold a call to builtin significand, if radix == 2. */
9064 fold_builtin_significand (tree arg
, tree rettype
)
9066 if (! validate_arg (arg
, REAL_TYPE
))
9071 if (TREE_CODE (arg
) == REAL_CST
&& ! TREE_OVERFLOW (arg
))
9073 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg
);
9080 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9081 return fold_convert (rettype
, arg
);
9083 /* For normal numbers, proceed iff radix == 2. */
9084 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg
)))->b
== 2)
9086 REAL_VALUE_TYPE result
= *value
;
9087 /* In GCC, normalized significands are in the range [0.5,
9088 1.0). We want them to be [1.0, 2.0) so set the
9090 SET_REAL_EXP (&result
, 1);
9091 return build_real (rettype
, result
);
9100 /* Fold a call to builtin frexp, we can assume the base is 2. */
9103 fold_builtin_frexp (tree arg0
, tree arg1
, tree rettype
)
9105 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9110 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9113 arg1
= build_fold_indirect_ref (arg1
);
9115 /* Proceed if a valid pointer type was passed in. */
9116 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == integer_type_node
)
9118 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9124 /* For +-0, return (*exp = 0, +-0). */
9125 exp
= integer_zero_node
;
9130 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9131 return omit_one_operand (rettype
, arg0
, arg1
);
9134 /* Since the frexp function always expects base 2, and in
9135 GCC normalized significands are already in the range
9136 [0.5, 1.0), we have exactly what frexp wants. */
9137 REAL_VALUE_TYPE frac_rvt
= *value
;
9138 SET_REAL_EXP (&frac_rvt
, 0);
9139 frac
= build_real (rettype
, frac_rvt
);
9140 exp
= build_int_cst (NULL_TREE
, REAL_EXP (value
));
9147 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9148 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
, exp
);
9149 TREE_SIDE_EFFECTS (arg1
) = 1;
9150 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
, frac
);
9156 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9157 then we can assume the base is two. If it's false, then we have to
9158 check the mode of the TYPE parameter in certain cases. */
9161 fold_builtin_load_exponent (tree arg0
, tree arg1
, tree type
, bool ldexp
)
9163 if (validate_arg (arg0
, REAL_TYPE
) && validate_arg (arg1
, INTEGER_TYPE
))
9168 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9169 if (real_zerop (arg0
) || integer_zerop (arg1
)
9170 || (TREE_CODE (arg0
) == REAL_CST
9171 && (real_isnan (&TREE_REAL_CST (arg0
))
9172 || real_isinf (&TREE_REAL_CST (arg0
)))))
9173 return omit_one_operand (type
, arg0
, arg1
);
9175 /* If both arguments are constant, then try to evaluate it. */
9176 if ((ldexp
|| REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2)
9177 && TREE_CODE (arg0
) == REAL_CST
&& !TREE_OVERFLOW (arg0
)
9178 && host_integerp (arg1
, 0))
9180 /* Bound the maximum adjustment to twice the range of the
9181 mode's valid exponents. Use abs to ensure the range is
9182 positive as a sanity check. */
9183 const long max_exp_adj
= 2 *
9184 labs (REAL_MODE_FORMAT (TYPE_MODE (type
))->emax
9185 - REAL_MODE_FORMAT (TYPE_MODE (type
))->emin
);
9187 /* Get the user-requested adjustment. */
9188 const HOST_WIDE_INT req_exp_adj
= tree_low_cst (arg1
, 0);
9190 /* The requested adjustment must be inside this range. This
9191 is a preliminary cap to avoid things like overflow, we
9192 may still fail to compute the result for other reasons. */
9193 if (-max_exp_adj
< req_exp_adj
&& req_exp_adj
< max_exp_adj
)
9195 REAL_VALUE_TYPE initial_result
;
9197 real_ldexp (&initial_result
, &TREE_REAL_CST (arg0
), req_exp_adj
);
9199 /* Ensure we didn't overflow. */
9200 if (! real_isinf (&initial_result
))
9202 const REAL_VALUE_TYPE trunc_result
9203 = real_value_truncate (TYPE_MODE (type
), initial_result
);
9205 /* Only proceed if the target mode can hold the
9207 if (REAL_VALUES_EQUAL (initial_result
, trunc_result
))
9208 return build_real (type
, trunc_result
);
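
/* Illustrative note (not in the original): the constant folding above
   computes ldexp (x, n) as x * 2**n, so e.g. ldexp (1.5, 3) folds to 12.0,
   provided the value survives truncation to the target mode.  */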
9217 /* Fold a call to builtin modf. */
9220 fold_builtin_modf (tree arg0
, tree arg1
, tree rettype
)
9222 if (! validate_arg (arg0
, REAL_TYPE
) || ! validate_arg (arg1
, POINTER_TYPE
))
9227 if (!(TREE_CODE (arg0
) == REAL_CST
&& ! TREE_OVERFLOW (arg0
)))
9230 arg1
= build_fold_indirect_ref (arg1
);
9232 /* Proceed if a valid pointer type was passed in. */
9233 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1
)) == TYPE_MAIN_VARIANT (rettype
))
9235 const REAL_VALUE_TYPE
*const value
= TREE_REAL_CST_PTR (arg0
);
9236 REAL_VALUE_TYPE trunc
, frac
;
9242 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9243 trunc
= frac
= *value
;
9246 /* For +-Inf, return (*arg1 = arg0, +-0). */
9248 frac
.sign
= value
->sign
;
9252 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9253 real_trunc (&trunc
, VOIDmode
, value
);
9254 real_arithmetic (&frac
, MINUS_EXPR
, value
, &trunc
);
9255 /* If the original number was negative and already
9256 integral, then the fractional part is -0.0. */
9257 if (value
->sign
&& frac
.cl
== rvc_zero
)
9258 frac
.sign
= value
->sign
;
9262 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9263 arg1
= fold_build2 (MODIFY_EXPR
, rettype
, arg1
,
9264 build_real (rettype
, trunc
));
9265 TREE_SIDE_EFFECTS (arg1
) = 1;
9266 return fold_build2 (COMPOUND_EXPR
, rettype
, arg1
,
9267 build_real (rettype
, frac
));
9273 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9274 ARG is the argument for the call. */
9277 fold_builtin_classify (tree fndecl
, tree arg
, int builtin_index
)
9279 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9282 if (!validate_arg (arg
, REAL_TYPE
))
9284 error ("non-floating-point argument to function %qs",
9285 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9286 return error_mark_node
;
9289 switch (builtin_index
)
9291 case BUILT_IN_ISINF
:
9292 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9293 return omit_one_operand (type
, integer_zero_node
, arg
);
9295 if (TREE_CODE (arg
) == REAL_CST
)
9297 r
= TREE_REAL_CST (arg
);
9298 if (real_isinf (&r
))
9299 return real_compare (GT_EXPR
, &r
, &dconst0
)
9300 ? integer_one_node
: integer_minus_one_node
;
9302 return integer_zero_node
;
9307 case BUILT_IN_FINITE
:
9308 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
)))
9309 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg
))))
9310 return omit_one_operand (type
, integer_one_node
, arg
);
9312 if (TREE_CODE (arg
) == REAL_CST
)
9314 r
= TREE_REAL_CST (arg
);
9315 return real_isinf (&r
) || real_isnan (&r
)
9316 ? integer_zero_node
: integer_one_node
;
9321 case BUILT_IN_ISNAN
:
9322 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg
))))
9323 return omit_one_operand (type
, integer_zero_node
, arg
);
9325 if (TREE_CODE (arg
) == REAL_CST
)
9327 r
= TREE_REAL_CST (arg
);
9328 return real_isnan (&r
) ? integer_one_node
: integer_zero_node
;
9331 arg
= builtin_save_expr (arg
);
9332 return fold_build2 (UNORDERED_EXPR
, type
, arg
, arg
);
9339 /* Fold a call to an unordered comparison function such as
9340 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9341 being called and ARG0 and ARG1 are the arguments for the call.
9342 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9343 the opposite of the desired result. UNORDERED_CODE is used
9344 for modes that can hold NaNs and ORDERED_CODE is used for
9348 fold_builtin_unordered_cmp (tree fndecl
, tree arg0
, tree arg1
,
9349 enum tree_code unordered_code
,
9350 enum tree_code ordered_code
)
9352 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9353 enum tree_code code
;
9355 enum tree_code code0
, code1
;
9356 tree cmp_type
= NULL_TREE
;
9358 type0
= TREE_TYPE (arg0
);
9359 type1
= TREE_TYPE (arg1
);
9361 code0
= TREE_CODE (type0
);
9362 code1
= TREE_CODE (type1
);
9364 if (code0
== REAL_TYPE
&& code1
== REAL_TYPE
)
9365 /* Choose the wider of two real types. */
9366 cmp_type
= TYPE_PRECISION (type0
) >= TYPE_PRECISION (type1
)
9368 else if (code0
== REAL_TYPE
&& code1
== INTEGER_TYPE
)
9370 else if (code0
== INTEGER_TYPE
&& code1
== REAL_TYPE
)
9374 error ("non-floating-point argument to function %qs",
9375 IDENTIFIER_POINTER (DECL_NAME (fndecl
)));
9376 return error_mark_node
;
9379 arg0
= fold_convert (cmp_type
, arg0
);
9380 arg1
= fold_convert (cmp_type
, arg1
);
9382 if (unordered_code
== UNORDERED_EXPR
)
9384 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))))
9385 return omit_two_operands (type
, integer_zero_node
, arg0
, arg1
);
9386 return fold_build2 (UNORDERED_EXPR
, type
, arg0
, arg1
);
9389 code
= HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0
))) ? unordered_code
9391 return fold_build1 (TRUTH_NOT_EXPR
, type
,
9392 fold_build2 (code
, type
, arg0
, arg1
));
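
/* Illustrative sketch, not part of the original file: when NaNs are honored,
   isgreater (x, y) is folded above into the negation of the unordered
   "less than or equal" test.  The helper name is hypothetical and the block
   is not compiled.  */
#if 0
#include <math.h>

static int
folded_isgreater (double x, double y)
{
  /* Equivalent to the folded !(x UNLE y): false whenever either operand is
     a NaN, true exactly when x > y on ordered operands.  */
  return !(islessequal (x, y) || isunordered (x, y));
}
#endif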
9395 /* Fold a call to built-in function FNDECL with 0 arguments.
9396 IGNORE is true if the result of the function call is ignored. This
9397 function returns NULL_TREE if no simplification was possible. */
9400 fold_builtin_0 (tree fndecl
, bool ignore ATTRIBUTE_UNUSED
)
9402 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9403 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9406 CASE_FLT_FN (BUILT_IN_INF
):
9407 case BUILT_IN_INFD32
:
9408 case BUILT_IN_INFD64
:
9409 case BUILT_IN_INFD128
:
9410 return fold_builtin_inf (type
, true);
9412 CASE_FLT_FN (BUILT_IN_HUGE_VAL
):
9413 return fold_builtin_inf (type
, false);
9415 case BUILT_IN_CLASSIFY_TYPE
:
9416 return fold_builtin_classify_type (NULL_TREE
);
9424 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9425 IGNORE is true if the result of the function call is ignored. This
9426 function returns NULL_TREE if no simplification was possible. */
9429 fold_builtin_1 (tree fndecl
, tree arg0
, bool ignore
)
9431 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9432 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9436 case BUILT_IN_CONSTANT_P
:
9438 tree val
= fold_builtin_constant_p (arg0
);
	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid regressions in the form of link errors, return false
	   now.  */
9443 if (!val
&& !optimize
)
9444 val
= integer_zero_node
;
9449 case BUILT_IN_CLASSIFY_TYPE
:
9450 return fold_builtin_classify_type (arg0
);
9452 case BUILT_IN_STRLEN
:
9453 return fold_builtin_strlen (arg0
);
9455 CASE_FLT_FN (BUILT_IN_FABS
):
9456 return fold_builtin_fabs (arg0
, type
);
9460 case BUILT_IN_LLABS
:
9461 case BUILT_IN_IMAXABS
:
9462 return fold_builtin_abs (arg0
, type
);
9464 CASE_FLT_FN (BUILT_IN_CONJ
):
9465 if (validate_arg (arg0
, COMPLEX_TYPE
))
9466 return fold_build1 (CONJ_EXPR
, type
, arg0
);
9469 CASE_FLT_FN (BUILT_IN_CREAL
):
9470 if (validate_arg (arg0
, COMPLEX_TYPE
))
9471 return non_lvalue (fold_build1 (REALPART_EXPR
, type
, arg0
));
9474 CASE_FLT_FN (BUILT_IN_CIMAG
):
9475 if (validate_arg (arg0
, COMPLEX_TYPE
))
9476 return non_lvalue (fold_build1 (IMAGPART_EXPR
, type
, arg0
));
9479 CASE_FLT_FN (BUILT_IN_CCOS
):
9480 CASE_FLT_FN (BUILT_IN_CCOSH
):
9481 /* These functions are "even", i.e. f(x) == f(-x). */
9482 if (validate_arg (arg0
, COMPLEX_TYPE
))
9484 tree narg
= fold_strip_sign_ops (arg0
);
9486 return build_call_expr (fndecl
, 1, narg
);
9490 CASE_FLT_FN (BUILT_IN_CABS
):
9491 return fold_builtin_cabs (arg0
, type
, fndecl
);
9493 CASE_FLT_FN (BUILT_IN_CARG
):
9494 return fold_builtin_carg (arg0
, type
);
9496 CASE_FLT_FN (BUILT_IN_SQRT
):
9497 return fold_builtin_sqrt (arg0
, type
);
9499 CASE_FLT_FN (BUILT_IN_CBRT
):
9500 return fold_builtin_cbrt (arg0
, type
);
9502 CASE_FLT_FN (BUILT_IN_ASIN
):
9503 if (validate_arg (arg0
, REAL_TYPE
))
9504 return do_mpfr_arg1 (arg0
, type
, mpfr_asin
,
9505 &dconstm1
, &dconst1
, true);
9508 CASE_FLT_FN (BUILT_IN_ACOS
):
9509 if (validate_arg (arg0
, REAL_TYPE
))
9510 return do_mpfr_arg1 (arg0
, type
, mpfr_acos
,
9511 &dconstm1
, &dconst1
, true);
9514 CASE_FLT_FN (BUILT_IN_ATAN
):
9515 if (validate_arg (arg0
, REAL_TYPE
))
9516 return do_mpfr_arg1 (arg0
, type
, mpfr_atan
, NULL
, NULL
, 0);
9519 CASE_FLT_FN (BUILT_IN_ASINH
):
9520 if (validate_arg (arg0
, REAL_TYPE
))
9521 return do_mpfr_arg1 (arg0
, type
, mpfr_asinh
, NULL
, NULL
, 0);
9524 CASE_FLT_FN (BUILT_IN_ACOSH
):
9525 if (validate_arg (arg0
, REAL_TYPE
))
9526 return do_mpfr_arg1 (arg0
, type
, mpfr_acosh
,
9527 &dconst1
, NULL
, true);
9530 CASE_FLT_FN (BUILT_IN_ATANH
):
9531 if (validate_arg (arg0
, REAL_TYPE
))
9532 return do_mpfr_arg1 (arg0
, type
, mpfr_atanh
,
9533 &dconstm1
, &dconst1
, false);
9536 CASE_FLT_FN (BUILT_IN_SIN
):
9537 if (validate_arg (arg0
, REAL_TYPE
))
9538 return do_mpfr_arg1 (arg0
, type
, mpfr_sin
, NULL
, NULL
, 0);
9541 CASE_FLT_FN (BUILT_IN_COS
):
9542 return fold_builtin_cos (arg0
, type
, fndecl
);
9545 CASE_FLT_FN (BUILT_IN_TAN
):
9546 return fold_builtin_tan (arg0
, type
);
9548 CASE_FLT_FN (BUILT_IN_CEXP
):
9549 return fold_builtin_cexp (arg0
, type
);
9551 CASE_FLT_FN (BUILT_IN_CEXPI
):
9552 if (validate_arg (arg0
, REAL_TYPE
))
9553 return do_mpfr_sincos (arg0
, NULL_TREE
, NULL_TREE
);
9556 CASE_FLT_FN (BUILT_IN_SINH
):
9557 if (validate_arg (arg0
, REAL_TYPE
))
9558 return do_mpfr_arg1 (arg0
, type
, mpfr_sinh
, NULL
, NULL
, 0);
9561 CASE_FLT_FN (BUILT_IN_COSH
):
9562 return fold_builtin_cosh (arg0
, type
, fndecl
);
9564 CASE_FLT_FN (BUILT_IN_TANH
):
9565 if (validate_arg (arg0
, REAL_TYPE
))
9566 return do_mpfr_arg1 (arg0
, type
, mpfr_tanh
, NULL
, NULL
, 0);
9569 CASE_FLT_FN (BUILT_IN_ERF
):
9570 if (validate_arg (arg0
, REAL_TYPE
))
9571 return do_mpfr_arg1 (arg0
, type
, mpfr_erf
, NULL
, NULL
, 0);
9574 CASE_FLT_FN (BUILT_IN_ERFC
):
9575 if (validate_arg (arg0
, REAL_TYPE
))
9576 return do_mpfr_arg1 (arg0
, type
, mpfr_erfc
, NULL
, NULL
, 0);
9579 CASE_FLT_FN (BUILT_IN_TGAMMA
):
9580 if (validate_arg (arg0
, REAL_TYPE
))
9581 return do_mpfr_arg1 (arg0
, type
, mpfr_gamma
, NULL
, NULL
, 0);
9584 CASE_FLT_FN (BUILT_IN_EXP
):
9585 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp
);
9587 CASE_FLT_FN (BUILT_IN_EXP2
):
9588 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp2
);
9590 CASE_FLT_FN (BUILT_IN_EXP10
):
9591 CASE_FLT_FN (BUILT_IN_POW10
):
9592 return fold_builtin_exponent (fndecl
, arg0
, mpfr_exp10
);
9594 CASE_FLT_FN (BUILT_IN_EXPM1
):
9595 if (validate_arg (arg0
, REAL_TYPE
))
9596 return do_mpfr_arg1 (arg0
, type
, mpfr_expm1
, NULL
, NULL
, 0);
9599 CASE_FLT_FN (BUILT_IN_LOG
):
9600 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log
);
9602 CASE_FLT_FN (BUILT_IN_LOG2
):
9603 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log2
);
9605 CASE_FLT_FN (BUILT_IN_LOG10
):
9606 return fold_builtin_logarithm (fndecl
, arg0
, mpfr_log10
);
9608 CASE_FLT_FN (BUILT_IN_LOG1P
):
9609 if (validate_arg (arg0
, REAL_TYPE
))
9610 return do_mpfr_arg1 (arg0
, type
, mpfr_log1p
,
9611 &dconstm1
, NULL
, false);
9614 CASE_FLT_FN (BUILT_IN_NAN
):
9615 case BUILT_IN_NAND32
:
9616 case BUILT_IN_NAND64
:
9617 case BUILT_IN_NAND128
:
9618 return fold_builtin_nan (arg0
, type
, true);
9620 CASE_FLT_FN (BUILT_IN_NANS
):
9621 return fold_builtin_nan (arg0
, type
, false);
9623 CASE_FLT_FN (BUILT_IN_FLOOR
):
9624 return fold_builtin_floor (fndecl
, arg0
);
9626 CASE_FLT_FN (BUILT_IN_CEIL
):
9627 return fold_builtin_ceil (fndecl
, arg0
);
9629 CASE_FLT_FN (BUILT_IN_TRUNC
):
9630 return fold_builtin_trunc (fndecl
, arg0
);
9632 CASE_FLT_FN (BUILT_IN_ROUND
):
9633 return fold_builtin_round (fndecl
, arg0
);
9635 CASE_FLT_FN (BUILT_IN_NEARBYINT
):
9636 CASE_FLT_FN (BUILT_IN_RINT
):
9637 return fold_trunc_transparent_mathfn (fndecl
, arg0
);
9639 CASE_FLT_FN (BUILT_IN_LCEIL
):
9640 CASE_FLT_FN (BUILT_IN_LLCEIL
):
9641 CASE_FLT_FN (BUILT_IN_LFLOOR
):
9642 CASE_FLT_FN (BUILT_IN_LLFLOOR
):
9643 CASE_FLT_FN (BUILT_IN_LROUND
):
9644 CASE_FLT_FN (BUILT_IN_LLROUND
):
9645 return fold_builtin_int_roundingfn (fndecl
, arg0
);
9647 CASE_FLT_FN (BUILT_IN_LRINT
):
9648 CASE_FLT_FN (BUILT_IN_LLRINT
):
9649 return fold_fixed_mathfn (fndecl
, arg0
);
9651 case BUILT_IN_BSWAP32
:
9652 case BUILT_IN_BSWAP64
:
9653 return fold_builtin_bswap (fndecl
, arg0
);
9655 CASE_INT_FN (BUILT_IN_FFS
):
9656 CASE_INT_FN (BUILT_IN_CLZ
):
9657 CASE_INT_FN (BUILT_IN_CTZ
):
9658 CASE_INT_FN (BUILT_IN_POPCOUNT
):
9659 CASE_INT_FN (BUILT_IN_PARITY
):
9660 return fold_builtin_bitop (fndecl
, arg0
);
9662 CASE_FLT_FN (BUILT_IN_SIGNBIT
):
9663 return fold_builtin_signbit (arg0
, type
);
9665 CASE_FLT_FN (BUILT_IN_SIGNIFICAND
):
9666 return fold_builtin_significand (arg0
, type
);
9668 CASE_FLT_FN (BUILT_IN_ILOGB
):
9669 CASE_FLT_FN (BUILT_IN_LOGB
):
9670 return fold_builtin_logb (arg0
, type
);
9672 case BUILT_IN_ISASCII
:
9673 return fold_builtin_isascii (arg0
);
9675 case BUILT_IN_TOASCII
:
9676 return fold_builtin_toascii (arg0
);
9678 case BUILT_IN_ISDIGIT
:
9679 return fold_builtin_isdigit (arg0
);
9681 CASE_FLT_FN (BUILT_IN_FINITE
):
9682 case BUILT_IN_FINITED32
:
9683 case BUILT_IN_FINITED64
:
9684 case BUILT_IN_FINITED128
:
9685 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_FINITE
);
9687 CASE_FLT_FN (BUILT_IN_ISINF
):
9688 case BUILT_IN_ISINFD32
:
9689 case BUILT_IN_ISINFD64
:
9690 case BUILT_IN_ISINFD128
:
9691 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISINF
);
9693 CASE_FLT_FN (BUILT_IN_ISNAN
):
9694 case BUILT_IN_ISNAND32
:
9695 case BUILT_IN_ISNAND64
:
9696 case BUILT_IN_ISNAND128
:
9697 return fold_builtin_classify (fndecl
, arg0
, BUILT_IN_ISNAN
);
9699 case BUILT_IN_PRINTF
:
9700 case BUILT_IN_PRINTF_UNLOCKED
:
9701 case BUILT_IN_VPRINTF
:
9702 return fold_builtin_printf (fndecl
, arg0
, NULL_TREE
, ignore
, fcode
);
9712 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9713 IGNORE is true if the result of the function call is ignored. This
9714 function returns NULL_TREE if no simplification was possible. */
9717 fold_builtin_2 (tree fndecl
, tree arg0
, tree arg1
, bool ignore
)
9719 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9720 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9725 CASE_FLT_FN (BUILT_IN_ATAN2
):
9726 if (validate_arg (arg0
, REAL_TYPE
)
9727 && validate_arg(arg1
, REAL_TYPE
))
9728 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_atan2
);
9731 CASE_FLT_FN (BUILT_IN_FDIM
):
9732 if (validate_arg (arg0
, REAL_TYPE
)
9733 && validate_arg(arg1
, REAL_TYPE
))
9734 return do_mpfr_arg2 (arg0
, arg1
, type
, mpfr_dim
);
9737 CASE_FLT_FN (BUILT_IN_HYPOT
):
9738 return fold_builtin_hypot (fndecl
, arg0
, arg1
, type
);
9740 CASE_FLT_FN (BUILT_IN_LDEXP
):
9741 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/true);
9742 CASE_FLT_FN (BUILT_IN_SCALBN
):
9743 CASE_FLT_FN (BUILT_IN_SCALBLN
):
9744 return fold_builtin_load_exponent (arg0
, arg1
, type
, /*ldexp=*/false);
9746 CASE_FLT_FN (BUILT_IN_FREXP
):
9747 return fold_builtin_frexp (arg0
, arg1
, type
);
9749 CASE_FLT_FN (BUILT_IN_MODF
):
9750 return fold_builtin_modf (arg0
, arg1
, type
);
9752 case BUILT_IN_BZERO
:
9753 return fold_builtin_bzero (arg0
, arg1
, ignore
);
9755 case BUILT_IN_FPUTS
:
9756 return fold_builtin_fputs (arg0
, arg1
, ignore
, false, NULL_TREE
);
9758 case BUILT_IN_FPUTS_UNLOCKED
:
9759 return fold_builtin_fputs (arg0
, arg1
, ignore
, true, NULL_TREE
);
9761 case BUILT_IN_STRSTR
:
9762 return fold_builtin_strstr (arg0
, arg1
, type
);
9764 case BUILT_IN_STRCAT
:
9765 return fold_builtin_strcat (arg0
, arg1
);
9767 case BUILT_IN_STRSPN
:
9768 return fold_builtin_strspn (arg0
, arg1
);
9770 case BUILT_IN_STRCSPN
:
9771 return fold_builtin_strcspn (arg0
, arg1
);
9773 case BUILT_IN_STRCHR
:
9774 case BUILT_IN_INDEX
:
9775 return fold_builtin_strchr (arg0
, arg1
, type
);
9777 case BUILT_IN_STRRCHR
:
9778 case BUILT_IN_RINDEX
:
9779 return fold_builtin_strrchr (arg0
, arg1
, type
);
9781 case BUILT_IN_STRCPY
:
9782 return fold_builtin_strcpy (fndecl
, arg0
, arg1
, NULL_TREE
);
9784 case BUILT_IN_STRCMP
:
9785 return fold_builtin_strcmp (arg0
, arg1
);
9787 case BUILT_IN_STRPBRK
:
9788 return fold_builtin_strpbrk (arg0
, arg1
, type
);
9790 case BUILT_IN_EXPECT
:
9791 return fold_builtin_expect (arg0
);
9793 CASE_FLT_FN (BUILT_IN_POW
):
9794 return fold_builtin_pow (fndecl
, arg0
, arg1
, type
);
9796 CASE_FLT_FN (BUILT_IN_POWI
):
9797 return fold_builtin_powi (fndecl
, arg0
, arg1
, type
);
9799 CASE_FLT_FN (BUILT_IN_COPYSIGN
):
9800 return fold_builtin_copysign (fndecl
, arg0
, arg1
, type
);
9802 CASE_FLT_FN (BUILT_IN_FMIN
):
9803 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/false);
9805 CASE_FLT_FN (BUILT_IN_FMAX
):
9806 return fold_builtin_fmin_fmax (arg0
, arg1
, type
, /*max=*/true);
9808 case BUILT_IN_ISGREATER
:
9809 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLE_EXPR
, LE_EXPR
);
9810 case BUILT_IN_ISGREATEREQUAL
:
9811 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNLT_EXPR
, LT_EXPR
);
9812 case BUILT_IN_ISLESS
:
9813 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGE_EXPR
, GE_EXPR
);
9814 case BUILT_IN_ISLESSEQUAL
:
9815 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNGT_EXPR
, GT_EXPR
);
9816 case BUILT_IN_ISLESSGREATER
:
9817 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNEQ_EXPR
, EQ_EXPR
);
9818 case BUILT_IN_ISUNORDERED
:
9819 return fold_builtin_unordered_cmp (fndecl
, arg0
, arg1
, UNORDERED_EXPR
,
9822 /* We do the folding for va_start in the expander. */
9823 case BUILT_IN_VA_START
:
9826 case BUILT_IN_SPRINTF
:
9827 return fold_builtin_sprintf (arg0
, arg1
, NULL_TREE
, ignore
);
9829 case BUILT_IN_OBJECT_SIZE
:
9830 return fold_builtin_object_size (arg0
, arg1
);
9832 case BUILT_IN_PRINTF
:
9833 case BUILT_IN_PRINTF_UNLOCKED
:
9834 case BUILT_IN_VPRINTF
:
9835 return fold_builtin_printf (fndecl
, arg0
, arg1
, ignore
, fcode
);
9837 case BUILT_IN_PRINTF_CHK
:
9838 case BUILT_IN_VPRINTF_CHK
:
9839 if (!validate_arg (arg0
, INTEGER_TYPE
)
9840 || TREE_SIDE_EFFECTS (arg0
))
9843 return fold_builtin_printf (fndecl
, arg1
, NULL_TREE
, ignore
, fcode
);
9846 case BUILT_IN_FPRINTF
:
9847 case BUILT_IN_FPRINTF_UNLOCKED
:
9848 case BUILT_IN_VFPRINTF
:
9849 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, NULL_TREE
,
9858 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9859 and ARG2. IGNORE is true if the result of the function call is ignored.
9860 This function returns NULL_TREE if no simplification was possible. */
9863 fold_builtin_3 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, bool ignore
)
9865 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
9866 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9870 CASE_FLT_FN (BUILT_IN_SINCOS
):
9871 return fold_builtin_sincos (arg0
, arg1
, arg2
);
9873 CASE_FLT_FN (BUILT_IN_FMA
):
9874 if (validate_arg (arg0
, REAL_TYPE
)
9875 && validate_arg(arg1
, REAL_TYPE
)
9876 && validate_arg(arg2
, REAL_TYPE
))
9877 return do_mpfr_arg3 (arg0
, arg1
, arg2
, type
, mpfr_fma
);
9880 case BUILT_IN_MEMSET
:
9881 return fold_builtin_memset (arg0
, arg1
, arg2
, type
, ignore
);
9883 case BUILT_IN_BCOPY
:
9884 return fold_builtin_memory_op (arg1
, arg0
, arg2
, void_type_node
, true, /*endp=*/3);
9886 case BUILT_IN_MEMCPY
:
9887 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/0);
9889 case BUILT_IN_MEMPCPY
:
9890 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/1);
9892 case BUILT_IN_MEMMOVE
:
9893 return fold_builtin_memory_op (arg0
, arg1
, arg2
, type
, ignore
, /*endp=*/3);
9895 case BUILT_IN_STRNCAT
:
9896 return fold_builtin_strncat (arg0
, arg1
, arg2
);
9898 case BUILT_IN_STRNCPY
:
9899 return fold_builtin_strncpy (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
);
9901 case BUILT_IN_STRNCMP
:
9902 return fold_builtin_strncmp (arg0
, arg1
, arg2
);
9905 case BUILT_IN_MEMCMP
:
9906 return fold_builtin_memcmp (arg0
, arg1
, arg2
);
9908 case BUILT_IN_SPRINTF
:
9909 return fold_builtin_sprintf (arg0
, arg1
, arg2
, ignore
);
9911 case BUILT_IN_STRCPY_CHK
:
9912 case BUILT_IN_STPCPY_CHK
:
9913 return fold_builtin_stxcpy_chk (fndecl
, arg0
, arg1
, arg2
, NULL_TREE
,
9916 case BUILT_IN_STRCAT_CHK
:
9917 return fold_builtin_strcat_chk (fndecl
, arg0
, arg1
, arg2
);
9919 case BUILT_IN_PRINTF_CHK
:
9920 case BUILT_IN_VPRINTF_CHK
:
9921 if (!validate_arg (arg0
, INTEGER_TYPE
)
9922 || TREE_SIDE_EFFECTS (arg0
))
9925 return fold_builtin_printf (fndecl
, arg1
, arg2
, ignore
, fcode
);
9928 case BUILT_IN_FPRINTF
:
9929 case BUILT_IN_FPRINTF_UNLOCKED
:
9930 case BUILT_IN_VFPRINTF
:
9931 return fold_builtin_fprintf (fndecl
, arg0
, arg1
, arg2
, ignore
, fcode
);
9933 case BUILT_IN_FPRINTF_CHK
:
9934 case BUILT_IN_VFPRINTF_CHK
:
9935 if (!validate_arg (arg1
, INTEGER_TYPE
)
9936 || TREE_SIDE_EFFECTS (arg1
))
9939 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, NULL_TREE
,
9948 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
9949 ARG2, and ARG3. IGNORE is true if the result of the function call is
9950 ignored. This function returns NULL_TREE if no simplification was
9954 fold_builtin_4 (tree fndecl
, tree arg0
, tree arg1
, tree arg2
, tree arg3
,
9957 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
9961 case BUILT_IN_MEMCPY_CHK
:
9962 case BUILT_IN_MEMPCPY_CHK
:
9963 case BUILT_IN_MEMMOVE_CHK
:
9964 case BUILT_IN_MEMSET_CHK
:
9965 return fold_builtin_memory_chk (fndecl
, arg0
, arg1
, arg2
, arg3
,
9967 DECL_FUNCTION_CODE (fndecl
));
9969 case BUILT_IN_STRNCPY_CHK
:
9970 return fold_builtin_strncpy_chk (arg0
, arg1
, arg2
, arg3
, NULL_TREE
);
9972 case BUILT_IN_STRNCAT_CHK
:
9973 return fold_builtin_strncat_chk (fndecl
, arg0
, arg1
, arg2
, arg3
);
9975 case BUILT_IN_FPRINTF_CHK
:
9976 case BUILT_IN_VFPRINTF_CHK
:
9977 if (!validate_arg (arg1
, INTEGER_TYPE
)
9978 || TREE_SIDE_EFFECTS (arg1
))
9981 return fold_builtin_fprintf (fndecl
, arg0
, arg2
, arg3
,
9991 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9992 arguments, where NARGS <= 4. IGNORE is true if the result of the
9993 function call is ignored. This function returns NULL_TREE if no
9994 simplification was possible. Note that this only folds builtins with
9995 fixed argument patterns. Foldings that do varargs-to-varargs
9996 transformations, or that match calls with more than 4 arguments,
9997 need to be handled with fold_builtin_varargs instead. */
9999 #define MAX_ARGS_TO_FOLD_BUILTIN 4
10002 fold_builtin_n (tree fndecl
, tree
*args
, int nargs
, bool ignore
)
10004 tree ret
= NULL_TREE
;
10008 ret
= fold_builtin_0 (fndecl
, ignore
);
10011 ret
= fold_builtin_1 (fndecl
, args
[0], ignore
);
10014 ret
= fold_builtin_2 (fndecl
, args
[0], args
[1], ignore
);
10017 ret
= fold_builtin_3 (fndecl
, args
[0], args
[1], args
[2], ignore
);
10020 ret
= fold_builtin_4 (fndecl
, args
[0], args
[1], args
[2], args
[3],
10028 ret
= build1 (NOP_EXPR
, GENERIC_TREE_TYPE (ret
), ret
);
10029 TREE_NO_WARNING (ret
) = 1;
10035 /* Builtins with folding operations that operate on "..." arguments
10036 need special handling; we need to store the arguments in a convenient
10037 data structure before attempting any folding. Fortunately there are
10038 only a few builtins that fall into this category. FNDECL is the
10039 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
10040 result of the function call is ignored. */
10043 fold_builtin_varargs (tree fndecl
, tree exp
, bool ignore ATTRIBUTE_UNUSED
)
10045 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
10046 tree ret
= NULL_TREE
;
10050 case BUILT_IN_SPRINTF_CHK
:
10051 case BUILT_IN_VSPRINTF_CHK
:
10052 ret
= fold_builtin_sprintf_chk (exp
, fcode
);
10055 case BUILT_IN_SNPRINTF_CHK
:
10056 case BUILT_IN_VSNPRINTF_CHK
:
10057 ret
= fold_builtin_snprintf_chk (exp
, NULL_TREE
, fcode
);
10064 ret
= build1 (NOP_EXPR
, TREE_TYPE (ret
), ret
);
10065 TREE_NO_WARNING (ret
) = 1;
10071 /* A wrapper function for builtin folding that prevents warnings for
10072 "statement without effect" and the like, caused by removing the
10073 call node earlier than the warning is generated. */
10076 fold_call_expr (tree exp
, bool ignore
)
10078 tree ret
= NULL_TREE
;
10079 tree fndecl
= get_callee_fndecl (exp
);
10081 && TREE_CODE (fndecl
) == FUNCTION_DECL
10082 && DECL_BUILT_IN (fndecl
))
10084 /* FIXME: Don't use a list in this interface. */
10085 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
10086 return targetm
.fold_builtin (fndecl
, CALL_EXPR_ARGS (exp
), ignore
);
10089 int nargs
= call_expr_nargs (exp
);
10090 if (nargs
<= MAX_ARGS_TO_FOLD_BUILTIN
)
10092 tree
*args
= CALL_EXPR_ARGP (exp
);
10093 ret
= fold_builtin_n (fndecl
, args
, nargs
, ignore
);
10096 ret
= fold_builtin_varargs (fndecl
, exp
, ignore
);
10099 /* Propagate location information from original call to
10100 expansion of builtin. Otherwise things like
10101 maybe_emit_chk_warning, that operate on the expansion
10102 of a builtin, will use the wrong location information. */
10103 if (CAN_HAVE_LOCATION_P (exp
) && EXPR_HAS_LOCATION (exp
))
10105 tree realret
= ret
;
10106 if (TREE_CODE (ret
) == NOP_EXPR
)
10107 realret
= TREE_OPERAND (ret
, 0);
10108 if (CAN_HAVE_LOCATION_P (realret
)
10109 && !EXPR_HAS_LOCATION (realret
))
10110 SET_EXPR_LOCATION (realret
, EXPR_LOCATION (exp
));
10119 /* Conveniently construct a function call expression. FNDECL names the
10120 function to be called and ARGLIST is a TREE_LIST of arguments. */
build_function_call_expr (tree fndecl, tree arglist)
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  int n = list_length (arglist);
  tree *argarray = (tree *) alloca (n * sizeof (tree));
  for (i = 0; i < n; i++, arglist = TREE_CHAIN (arglist))
    argarray[i] = TREE_VALUE (arglist);
  return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10136 /* Conveniently construct a function call expression. FNDECL names the
10137 function to be called, N is the number of arguments, and the "..."
10138 parameters are the argument expressions. */
build_call_expr (tree fndecl, int n, ...)
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
  tree *argarray = (tree *) alloca (n * sizeof (tree));
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  return fold_builtin_call_array (TREE_TYPE (fntype), fn, n, argarray);
10156 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
10157 N arguments are passed in the array ARGARRAY. */
fold_builtin_call_array (tree type,
  tree ret = NULL_TREE;
  if (TREE_CODE (fn) == ADDR_EXPR)
      tree fndecl = TREE_OPERAND (fn, 0);
      if (TREE_CODE (fndecl) == FUNCTION_DECL
          && DECL_BUILT_IN (fndecl))
          if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
              tree arglist = NULL_TREE;
              for (i = n - 1; i >= 0; i--)
                arglist = tree_cons (NULL_TREE, argarray[i], arglist);
              ret = targetm.fold_builtin (fndecl, arglist, false);
          else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
              /* First try the transformations that don't require consing up
                 an exp.  */
              ret = fold_builtin_n (fndecl, argarray, n, false);
          /* If we got this far, we need to build an exp.  */
          exp = build_call_array (type, fn, n, argarray);
          ret = fold_builtin_varargs (fndecl, exp, false);
          return ret ? ret : exp;
  return build_call_array (type, fn, n, argarray);
10203 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10204 along with N new arguments specified as the "..." parameters. SKIP
10205 is the number of arguments in EXP to be omitted. This function is used
10206 to do varargs-to-varargs transformations. */
rewrite_call_expr (tree exp, int skip, tree fndecl, int n, ...)
  int oldnargs = call_expr_nargs (exp);
  int nargs = oldnargs - skip + n;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
      buffer = alloca (nargs * sizeof (tree));
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (ap, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = CALL_EXPR_ARG (exp, j);
    buffer = CALL_EXPR_ARGP (exp) + skip;
  return fold (build_call_array (TREE_TYPE (exp), fn, nargs, buffer));
/* Validate a single argument ARG against a tree code CODE representing
   a type.  */
validate_arg (tree arg, enum tree_code code)
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
validate_arglist (tree callexpr, ...)
  enum tree_code code;
  call_expr_arg_iterator iter;
  va_start (ap, callexpr);
  init_call_expr_arg_iterator (callexpr, &iter);
      code = va_arg (ap, enum tree_code);
          /* This signifies an ellipsis; any further arguments are all OK.  */
          /* This signifies an endlink; if no arguments remain, return
             true, otherwise return false.  */
          res = !more_call_expr_args_p (&iter);
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
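/* A minimal standalone sketch (not part of this file's build) of the
   validate_arglist walk above, using plain stdarg.  The names arg_kind and
   check_args are hypothetical and exist only for this example: 0 plays the
   role of the ellipsis specifier and AK_END the role of the trailing
   VOID_TYPE.  */

#include <stdarg.h>
#include <stdbool.h>

enum arg_kind { AK_ELLIPSIS = 0, AK_END = 1, AK_INT = 2, AK_PTR = 3 };

static bool
check_args (const enum arg_kind *actual, int nargs, ...)
{
  va_list ap;
  bool res = false;
  int i = 0;

  va_start (ap, nargs);
  for (;;)
    {
      enum arg_kind code = (enum arg_kind) va_arg (ap, int);
      if (code == AK_ELLIPSIS)              /* "...": anything further is OK.  */
        { res = true; break; }
      if (code == AK_END)                   /* End marker: OK iff no args remain.  */
        { res = (i == nargs); break; }
      if (i >= nargs || actual[i] != code)  /* Too few args or code mismatch.  */
        { res = false; break; }
      i++;
    }
  va_end (ap);
  return res;
}

int
main (void)
{
  enum arg_kind a[] = { AK_PTR, AK_INT };
  return check_args (a, 2, AK_PTR, AK_INT, AK_END) ? 0 : 1;
}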
10300 /* Default target-specific builtin expander that does nothing. */
10303 default_expand_builtin (tree exp ATTRIBUTE_UNUSED
,
10304 rtx target ATTRIBUTE_UNUSED
,
10305 rtx subtarget ATTRIBUTE_UNUSED
,
10306 enum machine_mode mode ATTRIBUTE_UNUSED
,
10307 int ignore ATTRIBUTE_UNUSED
)
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */
readonly_data_expr (tree exp)
10320 if (TREE_CODE (exp
) != ADDR_EXPR
)
10323 exp
= get_base_address (TREE_OPERAND (exp
, 0));
10327 /* Make sure we call decl_readonly_section only for trees it
10328 can handle (since it returns true for everything it doesn't
10330 if (TREE_CODE (exp
) == STRING_CST
10331 || TREE_CODE (exp
) == CONSTRUCTOR
10332 || (TREE_CODE (exp
) == VAR_DECL
&& TREE_STATIC (exp
)))
10333 return decl_readonly_section (exp
, 0);
10338 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10339 to the call, and TYPE is its return type.
10341 Return NULL_TREE if no simplification was possible, otherwise return the
10342 simplified form of the call as a tree.
10344 The simplified form may be a constant or other expression which
10345 computes the same value, but in a more efficient manner (including
10346 calls to other builtin functions).
10348 The call may contain arguments which need to be evaluated, but
10349 which are not useful to determine the result of the call. In
10350 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10351 COMPOUND_EXPR will be an argument which must be evaluated.
10352 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10353 COMPOUND_EXPR in the chain will contain the tree for the simplified
10354 form of the builtin function call. */
10357 fold_builtin_strstr (tree s1
, tree s2
, tree type
)
10359 if (!validate_arg (s1
, POINTER_TYPE
)
10360 || !validate_arg (s2
, POINTER_TYPE
))
10365 const char *p1
, *p2
;
10367 p2
= c_getstr (s2
);
10371 p1
= c_getstr (s1
);
10374 const char *r
= strstr (p1
, p2
);
10378 return build_int_cst (TREE_TYPE (s1
), 0);
10380 /* Return an offset into the constant string argument. */
10381 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (s1
),
10382 s1
, build_int_cst (TREE_TYPE (s1
), r
- p1
));
10383 return fold_convert (type
, tem
);
10386 /* The argument is const char *, and the result is char *, so we need
10387 a type conversion here to avoid a warning. */
10389 return fold_convert (type
, s1
);
10394 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10398 /* New argument list transforming strstr(s1, s2) to
10399 strchr(s1, s2[0]). */
10400 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
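/* A standalone sketch (separate test program, not part of this file) of the
   source-level equivalences fold_builtin_strstr relies on: a one-character
   needle makes strstr behave like strchr, and an empty needle yields the
   haystack itself.  */

#include <assert.h>
#include <string.h>

int
main (void)
{
  const char *s = "fold builtins";
  assert (strstr (s, "b") == strchr (s, 'b'));  /* strstr (s1, "c") -> strchr (s1, 'c') */
  assert (strstr (s, "") == s);                 /* strstr (s1, "")  -> s1               */
  return 0;
}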
10404 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10405 the call, and TYPE is its return type.
10407 Return NULL_TREE if no simplification was possible, otherwise return the
10408 simplified form of the call as a tree.
10410 The simplified form may be a constant or other expression which
10411 computes the same value, but in a more efficient manner (including
10412 calls to other builtin functions).
10414 The call may contain arguments which need to be evaluated, but
10415 which are not useful to determine the result of the call. In
10416 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10417 COMPOUND_EXPR will be an argument which must be evaluated.
10418 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10419 COMPOUND_EXPR in the chain will contain the tree for the simplified
10420 form of the builtin function call. */
10423 fold_builtin_strchr (tree s1
, tree s2
, tree type
)
10425 if (!validate_arg (s1
, POINTER_TYPE
)
10426 || !validate_arg (s2
, INTEGER_TYPE
))
10432 if (TREE_CODE (s2
) != INTEGER_CST
)
10435 p1
= c_getstr (s1
);
10442 if (target_char_cast (s2
, &c
))
10445 r
= strchr (p1
, c
);
10448 return build_int_cst (TREE_TYPE (s1
), 0);
10450 /* Return an offset into the constant string argument. */
10451 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (s1
),
10452 s1
, build_int_cst (TREE_TYPE (s1
), r
- p1
));
10453 return fold_convert (type
, tem
);
10459 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10460 the call, and TYPE is its return type.
10462 Return NULL_TREE if no simplification was possible, otherwise return the
10463 simplified form of the call as a tree.
10465 The simplified form may be a constant or other expression which
10466 computes the same value, but in a more efficient manner (including
10467 calls to other builtin functions).
10469 The call may contain arguments which need to be evaluated, but
10470 which are not useful to determine the result of the call. In
10471 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10472 COMPOUND_EXPR will be an argument which must be evaluated.
10473 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10474 COMPOUND_EXPR in the chain will contain the tree for the simplified
10475 form of the builtin function call. */
10478 fold_builtin_strrchr (tree s1
, tree s2
, tree type
)
10480 if (!validate_arg (s1
, POINTER_TYPE
)
10481 || !validate_arg (s2
, INTEGER_TYPE
))
10488 if (TREE_CODE (s2
) != INTEGER_CST
)
10491 p1
= c_getstr (s1
);
10498 if (target_char_cast (s2
, &c
))
10501 r
= strrchr (p1
, c
);
10504 return build_int_cst (TREE_TYPE (s1
), 0);
10506 /* Return an offset into the constant string argument. */
10507 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (s1
),
10508 s1
, build_int_cst (TREE_TYPE (s1
), r
- p1
));
10509 return fold_convert (type
, tem
);
10512 if (! integer_zerop (s2
))
10515 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10519 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
10520 return build_call_expr (fn
, 2, s1
, s2
);
10524 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10525 to the call, and TYPE is its return type.
10527 Return NULL_TREE if no simplification was possible, otherwise return the
10528 simplified form of the call as a tree.
10530 The simplified form may be a constant or other expression which
10531 computes the same value, but in a more efficient manner (including
10532 calls to other builtin functions).
10534 The call may contain arguments which need to be evaluated, but
10535 which are not useful to determine the result of the call. In
10536 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10537 COMPOUND_EXPR will be an argument which must be evaluated.
10538 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10539 COMPOUND_EXPR in the chain will contain the tree for the simplified
10540 form of the builtin function call. */
10543 fold_builtin_strpbrk (tree s1
, tree s2
, tree type
)
10545 if (!validate_arg (s1
, POINTER_TYPE
)
10546 || !validate_arg (s2
, POINTER_TYPE
))
10551 const char *p1
, *p2
;
10553 p2
= c_getstr (s2
);
10557 p1
= c_getstr (s1
);
10560 const char *r
= strpbrk (p1
, p2
);
10564 return build_int_cst (TREE_TYPE (s1
), 0);
10566 /* Return an offset into the constant string argument. */
10567 tem
= fold_build2 (PLUS_EXPR
, TREE_TYPE (s1
),
10568 s1
, build_int_cst (TREE_TYPE (s1
), r
- p1
));
10569 return fold_convert (type
, tem
);
10573 /* strpbrk(x, "") == NULL.
10574 Evaluate and ignore s1 in case it had side-effects. */
10575 return omit_one_operand (TREE_TYPE (s1
), integer_zero_node
, s1
);
10578 return NULL_TREE
; /* Really call strpbrk. */
10580 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
10584 /* New argument list transforming strpbrk(s1, s2) to
10585 strchr(s1, s2[0]). */
10586 return build_call_expr (fn
, 2, s1
, build_int_cst (NULL_TREE
, p2
[0]));
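/* A standalone sketch (separate test program) of the strpbrk folds above:
   an empty accept set can never match, and a one-character accept set is
   equivalent to strchr.  */

#include <assert.h>
#include <stddef.h>
#include <string.h>

int
main (void)
{
  const char *x = "abcdef";
  assert (strpbrk (x, "") == NULL);              /* strpbrk (x, "")  -> NULL             */
  assert (strpbrk (x, "d") == strchr (x, 'd'));  /* strpbrk (x, "d") -> strchr (x, 'd')  */
  return 0;
}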
10590 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
10593 Return NULL_TREE if no simplification was possible, otherwise return the
10594 simplified form of the call as a tree.
10596 The simplified form may be a constant or other expression which
10597 computes the same value, but in a more efficient manner (including
10598 calls to other builtin functions).
10600 The call may contain arguments which need to be evaluated, but
10601 which are not useful to determine the result of the call. In
10602 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10603 COMPOUND_EXPR will be an argument which must be evaluated.
10604 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10605 COMPOUND_EXPR in the chain will contain the tree for the simplified
10606 form of the builtin function call. */
10609 fold_builtin_strcat (tree dst
, tree src
)
10611 if (!validate_arg (dst
, POINTER_TYPE
)
10612 || !validate_arg (src
, POINTER_TYPE
))
10616 const char *p
= c_getstr (src
);
10618 /* If the string length is zero, return the dst parameter. */
10619 if (p
&& *p
== '\0')
10626 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
10627 arguments to the call.
10629 Return NULL_TREE if no simplification was possible, otherwise return the
10630 simplified form of the call as a tree.
10632 The simplified form may be a constant or other expression which
10633 computes the same value, but in a more efficient manner (including
10634 calls to other builtin functions).
10636 The call may contain arguments which need to be evaluated, but
10637 which are not useful to determine the result of the call. In
10638 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10639 COMPOUND_EXPR will be an argument which must be evaluated.
10640 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10641 COMPOUND_EXPR in the chain will contain the tree for the simplified
10642 form of the builtin function call. */
10645 fold_builtin_strncat (tree dst
, tree src
, tree len
)
10647 if (!validate_arg (dst
, POINTER_TYPE
)
10648 || !validate_arg (src
, POINTER_TYPE
)
10649 || !validate_arg (len
, INTEGER_TYPE
))
10653 const char *p
= c_getstr (src
);
10655 /* If the requested length is zero, or the src parameter string
10656 length is zero, return the dst parameter. */
10657 if (integer_zerop (len
) || (p
&& *p
== '\0'))
10658 return omit_two_operands (TREE_TYPE (dst
), dst
, src
, len
);
10660 /* If the requested len is greater than or equal to the string
10661 length, call strcat. */
10662 if (TREE_CODE (len
) == INTEGER_CST
&& p
10663 && compare_tree_int (len
, strlen (p
)) >= 0)
10665 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
10667 /* If the replacement _DECL isn't initialized, don't do the
10672 return build_call_expr (fn
, 2, dst
, src
);
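/* A standalone sketch (separate test program) of the strncat fold above:
   when the bound is at least strlen (SRC), strncat behaves exactly like
   strcat, so the bounded variant can be rewritten to the plain call.  */

#include <assert.h>
#include <string.h>

int
main (void)
{
  char a[32] = "fold";
  char b[32] = "fold";

  strncat (a, "ing", 16);   /* bound >= strlen ("ing"): same as strcat.  */
  strcat (b, "ing");
  assert (strcmp (a, b) == 0);
  return 0;
}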
10678 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10681 Return NULL_TREE if no simplification was possible, otherwise return the
10682 simplified form of the call as a tree.
10684 The simplified form may be a constant or other expression which
10685 computes the same value, but in a more efficient manner (including
10686 calls to other builtin functions).
10688 The call may contain arguments which need to be evaluated, but
10689 which are not useful to determine the result of the call. In
10690 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10691 COMPOUND_EXPR will be an argument which must be evaluated.
10692 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10693 COMPOUND_EXPR in the chain will contain the tree for the simplified
10694 form of the builtin function call. */
10697 fold_builtin_strspn (tree s1
, tree s2
)
10699 if (!validate_arg (s1
, POINTER_TYPE
)
10700 || !validate_arg (s2
, POINTER_TYPE
))
10704 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10706 /* If both arguments are constants, evaluate at compile-time. */
10709 const size_t r
= strspn (p1
, p2
);
10710 return size_int (r
);
10713 /* If either argument is "", return NULL_TREE. */
10714 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
10715 /* Evaluate and ignore both arguments in case either one has
10717 return omit_two_operands (integer_type_node
, integer_zero_node
,
10723 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10726 Return NULL_TREE if no simplification was possible, otherwise return the
10727 simplified form of the call as a tree.
10729 The simplified form may be a constant or other expression which
10730 computes the same value, but in a more efficient manner (including
10731 calls to other builtin functions).
10733 The call may contain arguments which need to be evaluated, but
10734 which are not useful to determine the result of the call. In
10735 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10736 COMPOUND_EXPR will be an argument which must be evaluated.
10737 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10738 COMPOUND_EXPR in the chain will contain the tree for the simplified
10739 form of the builtin function call. */
10742 fold_builtin_strcspn (tree s1
, tree s2
)
10744 if (!validate_arg (s1
, POINTER_TYPE
)
10745 || !validate_arg (s2
, POINTER_TYPE
))
10749 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
10751 /* If both arguments are constants, evaluate at compile-time. */
10754 const size_t r
= strcspn (p1
, p2
);
10755 return size_int (r
);
10758 /* If the first argument is "", return NULL_TREE. */
10759 if (p1
&& *p1
== '\0')
10761 /* Evaluate and ignore argument s2 in case it has
10763 return omit_one_operand (integer_type_node
,
10764 integer_zero_node
, s2
);
10767 /* If the second argument is "", return __builtin_strlen(s1). */
10768 if (p2
&& *p2
== '\0')
10770 tree fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
10772 /* If the replacement _DECL isn't initialized, don't do the
10777 return build_call_expr (fn
, 1, s1
);
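/* A standalone sketch (separate test program) of the strspn/strcspn folds
   above: with both arguments constant the result is a compile-time
   constant, and an empty reject set makes strcspn equivalent to strlen.  */

#include <assert.h>
#include <string.h>

int
main (void)
{
  assert (strspn ("aabbcc", "ab") == 4);                /* constant fold       */
  assert (strcspn ("aabbcc", "c") == 4);                /* constant fold       */
  assert (strcspn ("aabbcc", "") == strlen ("aabbcc")); /* "" -> strlen (s1)   */
  return 0;
}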
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned by the builtin
   will be ignored.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  If LEN is non-NULL, it represents the known length
   of the string.  Return NULL_TREE if no simplification was possible.  */
fold_builtin_fputs (tree arg0, tree arg1, bool ignore, bool unlocked, tree len)
10793 /* If we're using an unlocked function, assume the other unlocked
10794 functions exist explicitly. */
10795 tree
const fn_fputc
= unlocked
? built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
10796 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
10797 tree
const fn_fwrite
= unlocked
? built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
10798 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
10800 /* If the return value is used, don't do the transformation. */
10804 /* Verify the arguments in the original call. */
10805 if (!validate_arg (arg0
, POINTER_TYPE
)
10806 || !validate_arg (arg1
, POINTER_TYPE
))
10810 len
= c_strlen (arg0
, 0);
10812 /* Get the length of the string passed to fputs. If the length
10813 can't be determined, punt. */
10815 || TREE_CODE (len
) != INTEGER_CST
)
10818 switch (compare_tree_int (len
, 1))
    case -1: /* length is 0, delete the call entirely.  */
      return omit_one_operand (integer_type_node, integer_zero_node, arg1);
10823 case 0: /* length is 1, call fputc. */
10825 const char *p
= c_getstr (arg0
);
10830 return build_call_expr (fn_fputc
, 2,
10831 build_int_cst (NULL_TREE
, p
[0]), arg1
);
10837 case 1: /* length is greater than 1, call fwrite. */
10839 /* If optimizing for size keep fputs. */
10842 /* New argument list transforming fputs(string, stream) to
10843 fwrite(string, 1, len, stream). */
10845 return build_call_expr (fn_fwrite
, 4, arg0
, size_one_node
, len
, arg1
);
10850 gcc_unreachable ();
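/* A standalone sketch (separate test program) of the fputs rewrites above
   when the string length is known and the return value is unused: length 0
   drops the call, length 1 becomes fputc, and longer strings become fwrite
   (unless optimizing for size).  */

#include <stdio.h>

static void
demo (FILE *f)
{
  fputs ("", f);             /* length 0: call removed entirely             */
  fputc ('x', f);            /* fputs ("x", f)   -> fputc ('x', f)          */
  fwrite ("abc", 1, 3, f);   /* fputs ("abc", f) -> fwrite ("abc", 1, 3, f) */
}

int
main (void)
{
  demo (stdout);
  return 0;
}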
/* Fold the next_arg or va_start call EXP.  Returns true if an error was
   produced, false otherwise.  This is done so that we don't output the
   error or warning two or three times.  */
fold_builtin_next_arg (tree exp, bool va_start_p)
10861 tree fntype
= TREE_TYPE (current_function_decl
);
10862 int nargs
= call_expr_nargs (exp
);
10865 if (TYPE_ARG_TYPES (fntype
) == 0
10866 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
10867 == void_type_node
))
10869 error ("%<va_start%> used in function with fixed args");
10875 if (va_start_p
&& (nargs
!= 2))
10877 error ("wrong number of arguments to function %<va_start%>");
10880 arg
= CALL_EXPR_ARG (exp
, 1);
10882 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10883 when we checked the arguments and if needed issued a warning. */
10888 /* Evidently an out of date version of <stdarg.h>; can't validate
10889 va_start's second argument, but can still work as intended. */
10890 warning (0, "%<__builtin_next_arg%> called without an argument");
10893 else if (nargs
> 1)
10895 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10898 arg
= CALL_EXPR_ARG (exp
, 0);
10901 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10902 or __builtin_next_arg (0) the first time we see it, after checking
10903 the arguments and if needed issuing a warning. */
10904 if (!integer_zerop (arg
))
10906 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
10908 /* Strip off all nops for the sake of the comparison. This
10909 is not quite the same as STRIP_NOPS. It does more.
10910 We must also strip off INDIRECT_EXPR for C++ reference
10912 while (TREE_CODE (arg
) == NOP_EXPR
10913 || TREE_CODE (arg
) == CONVERT_EXPR
10914 || TREE_CODE (arg
) == NON_LVALUE_EXPR
10915 || TREE_CODE (arg
) == INDIRECT_REF
)
10916 arg
= TREE_OPERAND (arg
, 0);
10917 if (arg
!= last_parm
)
      /* FIXME: Sometimes with the tree optimizers we can end up with
         something that is not the last argument even though the user
         used the last argument.  We just warn and set the arg to be
         the last argument so that we will not get wrong code because
         of it.  */
10924 warning (0, "second parameter of %<va_start%> not last named argument");
10926 /* We want to verify the second parameter just once before the tree
10927 optimizers are run and then avoid keeping it in the tree,
10928 as otherwise we could warn even for correct code like:
10929 void foo (int i, ...)
10930 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10932 CALL_EXPR_ARG (exp
, 1) = integer_zero_node
;
10934 CALL_EXPR_ARG (exp
, 0) = integer_zero_node
;
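/* A standalone sketch (separate test program) of what the check above
   enforces: va_start's second argument must be the last named parameter
   of a varargs function.  */

#include <stdarg.h>

static int
sum (int scale, int count, ...)
{
  va_list ap;
  int i, total = 0;

  va_start (ap, count);   /* OK: COUNT is the last named parameter.
                             va_start (ap, scale) would draw the warning
                             diagnosed above.  */
  for (i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return scale * total;
}

int
main (void)
{
  return sum (1, 3, 1, 2, 3) == 6 ? 0 : 1;
}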
10940 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
10941 ORIG may be null if this is a 2-argument call. We don't attempt to
10942 simplify calls with more than 3 arguments.
10944 Return NULL_TREE if no simplification was possible, otherwise return the
10945 simplified form of the call as a tree. If IGNORED is true, it means that
10946 the caller does not use the returned value of the function. */
10949 fold_builtin_sprintf (tree dest
, tree fmt
, tree orig
, int ignored
)
10952 const char *fmt_str
= NULL
;
10954 /* Verify the required arguments in the original call. We deal with two
10955 types of sprintf() calls: 'sprintf (str, fmt)' and
10956 'sprintf (dest, "%s", orig)'. */
10957 if (!validate_arg (dest
, POINTER_TYPE
)
10958 || !validate_arg (fmt
, POINTER_TYPE
))
10960 if (orig
&& !validate_arg (orig
, POINTER_TYPE
))
10963 /* Check whether the format is a literal string constant. */
10964 fmt_str
= c_getstr (fmt
);
10965 if (fmt_str
== NULL
)
10969 retval
= NULL_TREE
;
10971 if (!init_target_chars ())
10974 /* If the format doesn't contain % args or %%, use strcpy. */
10975 if (strchr (fmt_str
, target_percent
) == NULL
)
10977 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
10982 /* Don't optimize sprintf (buf, "abc", ptr++). */
10986 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
10987 'format' is known to contain no % formats. */
10988 call
= build_call_expr (fn
, 2, dest
, fmt
);
10990 retval
= build_int_cst (NULL_TREE
, strlen (fmt_str
));
10993 /* If the format is "%s", use strcpy if the result isn't used. */
10994 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
10997 fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
11002 /* Don't crash on sprintf (str1, "%s"). */
11006 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
11009 retval
= c_strlen (orig
, 1);
11010 if (!retval
|| TREE_CODE (retval
) != INTEGER_CST
)
11013 call
= build_call_expr (fn
, 2, dest
, orig
);
11016 if (call
&& retval
)
11018 retval
= fold_convert
11019 (TREE_TYPE (TREE_TYPE (implicit_built_in_decls
[BUILT_IN_SPRINTF
])),
11021 return build2 (COMPOUND_EXPR
, TREE_TYPE (retval
), call
, retval
);
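/* A standalone sketch (separate test program) of the sprintf folds above:
   a %-free format degenerates to strcpy with the format's length as the
   return value, and a plain "%s" format copies its string argument.  */

#include <assert.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char buf[16];

  assert (sprintf (buf, "abc") == (int) strlen ("abc"));  /* -> strcpy (buf, "abc") */
  assert (strcmp (buf, "abc") == 0);

  sprintf (buf, "%s", "xyz");                             /* -> strcpy (buf, "xyz") */
  assert (strcmp (buf, "xyz") == 0);
  return 0;
}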
11027 /* Expand a call EXP to __builtin_object_size. */
11030 expand_builtin_object_size (tree exp
)
11033 int object_size_type
;
11034 tree fndecl
= get_callee_fndecl (exp
);
11035 location_t locus
= EXPR_LOCATION (exp
);
11037 if (!validate_arglist (exp
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11039 error ("%Hfirst argument of %D must be a pointer, second integer constant",
11041 expand_builtin_trap ();
11045 ost
= CALL_EXPR_ARG (exp
, 1);
11048 if (TREE_CODE (ost
) != INTEGER_CST
11049 || tree_int_cst_sgn (ost
) < 0
11050 || compare_tree_int (ost
, 3) > 0)
11052 error ("%Hlast argument of %D is not integer constant between 0 and 3",
11054 expand_builtin_trap ();
11058 object_size_type
= tree_low_cst (ost
, 0);
11060 return object_size_type
< 2 ? constm1_rtx
: const0_rtx
;
11063 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11064 FCODE is the BUILT_IN_* to use.
11065 Return NULL_RTX if we failed; the caller should emit a normal call,
11066 otherwise try to get the result in TARGET, if convenient (and in
11067 mode MODE if that's convenient). */
11070 expand_builtin_memory_chk (tree exp
, rtx target
, enum machine_mode mode
,
11071 enum built_in_function fcode
)
11073 tree dest
, src
, len
, size
;
11075 if (!validate_arglist (exp
,
11077 fcode
== BUILT_IN_MEMSET_CHK
11078 ? INTEGER_TYPE
: POINTER_TYPE
,
11079 INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
11082 dest
= CALL_EXPR_ARG (exp
, 0);
11083 src
= CALL_EXPR_ARG (exp
, 1);
11084 len
= CALL_EXPR_ARG (exp
, 2);
11085 size
= CALL_EXPR_ARG (exp
, 3);
11087 if (! host_integerp (size
, 1))
11090 if (host_integerp (len
, 1) || integer_all_onesp (size
))
11094 if (! integer_all_onesp (size
) && tree_int_cst_lt (size
, len
))
11096 location_t locus
= EXPR_LOCATION (exp
);
11097 warning (0, "%Hcall to %D will always overflow destination buffer",
11098 &locus
, get_callee_fndecl (exp
));
11103 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11104 mem{cpy,pcpy,move,set} is available. */
11107 case BUILT_IN_MEMCPY_CHK
:
11108 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11110 case BUILT_IN_MEMPCPY_CHK
:
11111 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11113 case BUILT_IN_MEMMOVE_CHK
:
11114 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11116 case BUILT_IN_MEMSET_CHK
:
11117 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11126 fn
= build_call_expr (fn
, 3, dest
, src
, len
);
11127 if (TREE_CODE (fn
) == CALL_EXPR
)
11128 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11129 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11131 else if (fcode
== BUILT_IN_MEMSET_CHK
)
11135 unsigned int dest_align
11136 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
11138 /* If DEST is not a pointer type, call the normal function. */
11139 if (dest_align
== 0)
11142 /* If SRC and DEST are the same (and not volatile), do nothing. */
11143 if (operand_equal_p (src
, dest
, 0))
11147 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11149 /* Evaluate and ignore LEN in case it has side-effects. */
11150 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
11151 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
11154 len
= fold_convert (TREE_TYPE (dest
), len
);
11155 expr
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dest
), dest
, len
);
11156 return expand_expr (expr
, target
, mode
, EXPAND_NORMAL
);
11159 /* __memmove_chk special case. */
11160 if (fcode
== BUILT_IN_MEMMOVE_CHK
)
11162 unsigned int src_align
11163 = get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
11165 if (src_align
== 0)
11168 /* If src is categorized for a readonly section we can use
11169 normal __memcpy_chk. */
11170 if (readonly_data_expr (src
))
11172 tree fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11175 fn
= build_call_expr (fn
, 4, dest
, src
, len
, size
);
11176 if (TREE_CODE (fn
) == CALL_EXPR
)
11177 CALL_EXPR_TAILCALL (fn
) = CALL_EXPR_TAILCALL (exp
);
11178 return expand_expr (fn
, target
, mode
, EXPAND_NORMAL
);
11185 /* Emit warning if a buffer overflow is detected at compile time. */
11188 maybe_emit_chk_warning (tree exp
, enum built_in_function fcode
)
11196 case BUILT_IN_STRCPY_CHK
:
11197 case BUILT_IN_STPCPY_CHK
:
11198 /* For __strcat_chk the warning will be emitted only if overflowing
11199 by at least strlen (dest) + 1 bytes. */
11200 case BUILT_IN_STRCAT_CHK
:
11201 len
= CALL_EXPR_ARG (exp
, 1);
11202 size
= CALL_EXPR_ARG (exp
, 2);
11205 case BUILT_IN_STRNCAT_CHK
:
11206 case BUILT_IN_STRNCPY_CHK
:
11207 len
= CALL_EXPR_ARG (exp
, 2);
11208 size
= CALL_EXPR_ARG (exp
, 3);
11210 case BUILT_IN_SNPRINTF_CHK
:
11211 case BUILT_IN_VSNPRINTF_CHK
:
11212 len
= CALL_EXPR_ARG (exp
, 1);
11213 size
= CALL_EXPR_ARG (exp
, 3);
11216 gcc_unreachable ();
11222 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11227 len
= c_strlen (len
, 1);
11228 if (! len
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11231 else if (fcode
== BUILT_IN_STRNCAT_CHK
)
11233 tree src
= CALL_EXPR_ARG (exp
, 1);
11234 if (! src
|| ! host_integerp (len
, 1) || tree_int_cst_lt (len
, size
))
11236 src
= c_strlen (src
, 1);
11237 if (! src
|| ! host_integerp (src
, 1))
11239 locus
= EXPR_LOCATION (exp
);
11240 warning (0, "%Hcall to %D might overflow destination buffer",
11241 &locus
, get_callee_fndecl (exp
));
11244 else if (tree_int_cst_lt (src
, size
))
11247 else if (! host_integerp (len
, 1) || ! tree_int_cst_lt (size
, len
))
11250 locus
= EXPR_LOCATION (exp
);
11251 warning (0, "%Hcall to %D will always overflow destination buffer",
11252 &locus
, get_callee_fndecl (exp
));
11255 /* Emit warning if a buffer overflow is detected at compile time
11256 in __sprintf_chk/__vsprintf_chk calls. */
11259 maybe_emit_sprintf_chk_warning (tree exp
, enum built_in_function fcode
)
11261 tree dest
, size
, len
, fmt
, flag
;
11262 const char *fmt_str
;
11263 int nargs
= call_expr_nargs (exp
);
11265 /* Verify the required arguments in the original call. */
11269 dest
= CALL_EXPR_ARG (exp
, 0);
11270 flag
= CALL_EXPR_ARG (exp
, 1);
11271 size
= CALL_EXPR_ARG (exp
, 2);
11272 fmt
= CALL_EXPR_ARG (exp
, 3);
11274 if (! host_integerp (size
, 1) || integer_all_onesp (size
))
11277 /* Check whether the format is a literal string constant. */
11278 fmt_str
= c_getstr (fmt
);
11279 if (fmt_str
== NULL
)
11282 if (!init_target_chars ())
11285 /* If the format doesn't contain % args or %%, we know its size. */
11286 if (strchr (fmt_str
, target_percent
) == 0)
11287 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11288 /* If the format is "%s" and first ... argument is a string literal,
11290 else if (fcode
== BUILT_IN_SPRINTF_CHK
11291 && strcmp (fmt_str
, target_percent_s
) == 0)
11297 arg
= CALL_EXPR_ARG (exp
, 4);
11298 if (! POINTER_TYPE_P (TREE_TYPE (arg
)))
11301 len
= c_strlen (arg
, 1);
11302 if (!len
|| ! host_integerp (len
, 1))
11308 if (! tree_int_cst_lt (len
, size
))
11310 location_t locus
= EXPR_LOCATION (exp
);
11311 warning (0, "%Hcall to %D will always overflow destination buffer",
11312 &locus
, get_callee_fndecl (exp
));
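/* A standalone sketch (separate translation unit, never executed) of the
   situation the warnings above diagnose: when built with
   -O2 -D_FORTIFY_SOURCE=2 the known source length (including the NUL)
   exceeds the known destination size, so the _chk expansion reports that
   the call will always overflow the destination buffer.  The function name
   is hypothetical and exists only to draw the warning.  */

#include <string.h>

void
overflow_example (void)
{
  char small[4];
  strcpy (small, "definitely longer than four bytes");
}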
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */
fold_builtin_object_size (tree ptr, tree ost)
11322 tree ret
= NULL_TREE
;
11323 int object_size_type
;
11325 if (!validate_arg (ptr
, POINTER_TYPE
)
11326 || !validate_arg (ost
, INTEGER_TYPE
))
11331 if (TREE_CODE (ost
) != INTEGER_CST
11332 || tree_int_cst_sgn (ost
) < 0
11333 || compare_tree_int (ost
, 3) > 0)
11336 object_size_type
= tree_low_cst (ost
, 0);
11338 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11339 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11340 and (size_t) 0 for types 2 and 3. */
11341 if (TREE_SIDE_EFFECTS (ptr
))
11342 return build_int_cst_type (size_type_node
, object_size_type
< 2 ? -1 : 0);
11344 if (TREE_CODE (ptr
) == ADDR_EXPR
)
11345 ret
= build_int_cstu (size_type_node
,
11346 compute_builtin_object_size (ptr
, object_size_type
));
11348 else if (TREE_CODE (ptr
) == SSA_NAME
)
11350 unsigned HOST_WIDE_INT bytes
;
11352 /* If object size is not known yet, delay folding until
11353 later. Maybe subsequent passes will help determining
11355 bytes
= compute_builtin_object_size (ptr
, object_size_type
);
11356 if (bytes
!= (unsigned HOST_WIDE_INT
) (object_size_type
< 2
11358 ret
= build_int_cstu (size_type_node
, bytes
);
11363 unsigned HOST_WIDE_INT low
= TREE_INT_CST_LOW (ret
);
11364 HOST_WIDE_INT high
= TREE_INT_CST_HIGH (ret
);
11365 if (fit_double_type (low
, high
, &low
, &high
, TREE_TYPE (ret
)))
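/* A standalone sketch (separate test program) of what __builtin_object_size
   reports, matching the folding above: a known object yields its remaining
   size (the buf + 8 case may need optimization to be tracked), while an
   unknown object yields (size_t) -1 for types 0/1 and 0 for types 2/3.  */

#include <stdio.h>

int
main (void)
{
  char buf[32];

  printf ("%zu %zu\n",
          __builtin_object_size (buf, 0),       /* 32                     */
          __builtin_object_size (buf + 8, 0));  /* 24 once it is tracked  */
  return 0;
}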
11372 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11373 DEST, SRC, LEN, and SIZE are the arguments to the call.
11374 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
11375 code of the builtin. If MAXLEN is not NULL, it is maximum length
11376 passed as third argument. */
11379 fold_builtin_memory_chk (tree fndecl
,
11380 tree dest
, tree src
, tree len
, tree size
,
11381 tree maxlen
, bool ignore
,
11382 enum built_in_function fcode
)
11386 if (!validate_arg (dest
, POINTER_TYPE
)
11387 || !validate_arg (src
,
11388 (fcode
== BUILT_IN_MEMSET_CHK
11389 ? INTEGER_TYPE
: POINTER_TYPE
))
11390 || !validate_arg (len
, INTEGER_TYPE
)
11391 || !validate_arg (size
, INTEGER_TYPE
))
11394 /* If SRC and DEST are the same (and not volatile), return DEST
11395 (resp. DEST+LEN for __mempcpy_chk). */
11396 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
11398 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
11399 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
11402 tree temp
= fold_convert (TREE_TYPE (dest
), len
);
11403 temp
= fold_build2 (PLUS_EXPR
, TREE_TYPE (dest
), dest
, temp
);
11404 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), temp
);
11408 if (! host_integerp (size
, 1))
11411 if (! integer_all_onesp (size
))
11413 if (! host_integerp (len
, 1))
11415 /* If LEN is not constant, try MAXLEN too.
11416 For MAXLEN only allow optimizing into non-_ocs function
11417 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11418 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11420 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
11422 /* (void) __mempcpy_chk () can be optimized into
11423 (void) __memcpy_chk (). */
11424 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11428 return build_call_expr (fn
, 4, dest
, src
, len
, size
);
11436 if (tree_int_cst_lt (size
, maxlen
))
11441 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11442 mem{cpy,pcpy,move,set} is available. */
11445 case BUILT_IN_MEMCPY_CHK
:
11446 fn
= built_in_decls
[BUILT_IN_MEMCPY
];
11448 case BUILT_IN_MEMPCPY_CHK
:
11449 fn
= built_in_decls
[BUILT_IN_MEMPCPY
];
11451 case BUILT_IN_MEMMOVE_CHK
:
11452 fn
= built_in_decls
[BUILT_IN_MEMMOVE
];
11454 case BUILT_IN_MEMSET_CHK
:
11455 fn
= built_in_decls
[BUILT_IN_MEMSET
];
11464 return build_call_expr (fn
, 3, dest
, src
, len
);
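/* A standalone sketch (separate test program) of how the _chk forms appear
   and when they fold back, mirroring the logic above: __builtin___memcpy_chk
   degenerates to plain memcpy once the object size is unknown ((size_t) -1)
   or the constant length is known to fit.  The helper name copy4 is
   hypothetical.  */

#include <string.h>

static void
copy4 (char *d, const char *s)
{
  /* With D a bare parameter the object size is unknown here, so this is
     exactly the case the folder turns back into memcpy (d, s, 4).  */
  __builtin___memcpy_chk (d, s, 4, __builtin_object_size (d, 0));
}

int
main (void)
{
  char dst[8];

  copy4 (dst, "abc");
  return dst[0] == 'a' ? 0 : 1;
}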
11467 /* Fold a call to the __st[rp]cpy_chk builtin.
11468 DEST, SRC, and SIZE are the arguments to the call.
11469 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
11470 code of the builtin. If MAXLEN is not NULL, it is maximum length of
11471 strings passed as second argument. */
11474 fold_builtin_stxcpy_chk (tree fndecl
, tree dest
, tree src
, tree size
,
11475 tree maxlen
, bool ignore
,
11476 enum built_in_function fcode
)
11480 if (!validate_arg (dest
, POINTER_TYPE
)
11481 || !validate_arg (src
, POINTER_TYPE
)
11482 || !validate_arg (size
, INTEGER_TYPE
))
11485 /* If SRC and DEST are the same (and not volatile), return DEST. */
11486 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
11487 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), dest
);
11489 if (! host_integerp (size
, 1))
11492 if (! integer_all_onesp (size
))
11494 len
= c_strlen (src
, 1);
11495 if (! len
|| ! host_integerp (len
, 1))
11497 /* If LEN is not constant, try MAXLEN too.
11498 For MAXLEN only allow optimizing into non-_ocs function
11499 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11500 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11502 if (fcode
== BUILT_IN_STPCPY_CHK
)
11507 /* If return value of __stpcpy_chk is ignored,
11508 optimize into __strcpy_chk. */
11509 fn
= built_in_decls
[BUILT_IN_STRCPY_CHK
];
11513 return build_call_expr (fn
, 3, dest
, src
, size
);
11516 if (! len
|| TREE_SIDE_EFFECTS (len
))
11519 /* If c_strlen returned something, but not a constant,
11520 transform __strcpy_chk into __memcpy_chk. */
11521 fn
= built_in_decls
[BUILT_IN_MEMCPY_CHK
];
11525 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
11526 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)),
11527 build_call_expr (fn
, 4,
11528 dest
, src
, len
, size
));
11534 if (! tree_int_cst_lt (maxlen
, size
))
11538 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
11539 fn
= built_in_decls
[fcode
== BUILT_IN_STPCPY_CHK
11540 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
];
11544 return build_call_expr (fn
, 2, dest
, src
);
11547 /* Fold a call to the __strncpy_chk builtin. DEST, SRC, LEN, and SIZE
11548 are the arguments to the call. If MAXLEN is not NULL, it is maximum
11549 length passed as third argument. */
11552 fold_builtin_strncpy_chk (tree dest
, tree src
, tree len
, tree size
,
11557 if (!validate_arg (dest
, POINTER_TYPE
)
11558 || !validate_arg (src
, POINTER_TYPE
)
11559 || !validate_arg (len
, INTEGER_TYPE
)
11560 || !validate_arg (size
, INTEGER_TYPE
))
11563 if (! host_integerp (size
, 1))
11566 if (! integer_all_onesp (size
))
11568 if (! host_integerp (len
, 1))
11570 /* If LEN is not constant, try MAXLEN too.
11571 For MAXLEN only allow optimizing into non-_ocs function
11572 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11573 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11579 if (tree_int_cst_lt (size
, maxlen
))
11583 /* If __builtin_strncpy_chk is used, assume strncpy is available. */
11584 fn
= built_in_decls
[BUILT_IN_STRNCPY
];
11588 return build_call_expr (fn
, 3, dest
, src
, len
);
11591 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
11592 are the arguments to the call. */
11595 fold_builtin_strcat_chk (tree fndecl
, tree dest
, tree src
, tree size
)
11600 if (!validate_arg (dest
, POINTER_TYPE
)
11601 || !validate_arg (src
, POINTER_TYPE
)
11602 || !validate_arg (size
, INTEGER_TYPE
))
11605 p
= c_getstr (src
);
11606 /* If the SRC parameter is "", return DEST. */
11607 if (p
&& *p
== '\0')
11608 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
11610 if (! host_integerp (size
, 1) || ! integer_all_onesp (size
))
11613 /* If __builtin_strcat_chk is used, assume strcat is available. */
11614 fn
= built_in_decls
[BUILT_IN_STRCAT
];
11618 return build_call_expr (fn
, 2, dest
, src
);
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */
fold_builtin_strncat_chk (tree fndecl,
                          tree dest, tree src, tree len, tree size)
11631 if (!validate_arg (dest
, POINTER_TYPE
)
11632 || !validate_arg (src
, POINTER_TYPE
)
11633 || !validate_arg (size
, INTEGER_TYPE
)
11634 || !validate_arg (size
, INTEGER_TYPE
))
11637 p
= c_getstr (src
);
11638 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
11639 if (p
&& *p
== '\0')
11640 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, len
);
11641 else if (integer_zerop (len
))
11642 return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl
)), dest
, src
);
11644 if (! host_integerp (size
, 1))
11647 if (! integer_all_onesp (size
))
11649 tree src_len
= c_strlen (src
, 1);
11651 && host_integerp (src_len
, 1)
11652 && host_integerp (len
, 1)
11653 && ! tree_int_cst_lt (len
, src_len
))
11655 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
11656 fn
= built_in_decls
[BUILT_IN_STRCAT_CHK
];
11660 return build_call_expr (fn
, 3, dest
, src
, size
);
11665 /* If __builtin_strncat_chk is used, assume strncat is available. */
11666 fn
= built_in_decls
[BUILT_IN_STRNCAT
];
11670 return build_call_expr (fn
, 3, dest
, src
, len
);
11673 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
11674 a normal call should be emitted rather than expanding the function
11675 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
11678 fold_builtin_sprintf_chk (tree exp
, enum built_in_function fcode
)
11680 tree dest
, size
, len
, fn
, fmt
, flag
;
11681 const char *fmt_str
;
11682 int nargs
= call_expr_nargs (exp
);
11684 /* Verify the required arguments in the original call. */
11687 dest
= CALL_EXPR_ARG (exp
, 0);
11688 if (!validate_arg (dest
, POINTER_TYPE
))
11690 flag
= CALL_EXPR_ARG (exp
, 1);
11691 if (!validate_arg (flag
, INTEGER_TYPE
))
11693 size
= CALL_EXPR_ARG (exp
, 2);
11694 if (!validate_arg (size
, INTEGER_TYPE
))
11696 fmt
= CALL_EXPR_ARG (exp
, 3);
11697 if (!validate_arg (fmt
, POINTER_TYPE
))
11700 if (! host_integerp (size
, 1))
11705 if (!init_target_chars ())
11708 /* Check whether the format is a literal string constant. */
11709 fmt_str
= c_getstr (fmt
);
11710 if (fmt_str
!= NULL
)
11712 /* If the format doesn't contain % args or %%, we know the size. */
11713 if (strchr (fmt_str
, target_percent
) == 0)
11715 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
11716 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
11718 /* If the format is "%s" and first ... argument is a string literal,
11719 we know the size too. */
11720 else if (fcode
== BUILT_IN_SPRINTF_CHK
11721 && strcmp (fmt_str
, target_percent_s
) == 0)
11727 arg
= CALL_EXPR_ARG (exp
, 4);
11728 if (validate_arg (arg
, POINTER_TYPE
))
11730 len
= c_strlen (arg
, 1);
11731 if (! len
|| ! host_integerp (len
, 1))
11738 if (! integer_all_onesp (size
))
11740 if (! len
|| ! tree_int_cst_lt (len
, size
))
11744 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
11745 or if format doesn't contain % chars or is "%s". */
11746 if (! integer_zerop (flag
))
11748 if (fmt_str
== NULL
)
11750 if (strchr (fmt_str
, target_percent
) != NULL
11751 && strcmp (fmt_str
, target_percent_s
))
11755 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
11756 fn
= built_in_decls
[fcode
== BUILT_IN_VSPRINTF_CHK
11757 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
];
11761 return rewrite_call_expr (exp
, 4, fn
, 2, dest
, fmt
);
/* Fold a call EXP to __{,v}snprintf_chk.  Return NULL_TREE if
11765 a normal call should be emitted rather than expanding the function
11766 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
11767 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
11768 passed as second argument. */
11771 fold_builtin_snprintf_chk (tree exp
, tree maxlen
,
11772 enum built_in_function fcode
)
11774 tree dest
, size
, len
, fn
, fmt
, flag
;
11775 const char *fmt_str
;
11777 /* Verify the required arguments in the original call. */
11778 if (call_expr_nargs (exp
) < 5)
11780 dest
= CALL_EXPR_ARG (exp
, 0);
11781 if (!validate_arg (dest
, POINTER_TYPE
))
11783 len
= CALL_EXPR_ARG (exp
, 1);
11784 if (!validate_arg (len
, INTEGER_TYPE
))
11786 flag
= CALL_EXPR_ARG (exp
, 2);
11787 if (!validate_arg (flag
, INTEGER_TYPE
))
11789 size
= CALL_EXPR_ARG (exp
, 3);
11790 if (!validate_arg (size
, INTEGER_TYPE
))
11792 fmt
= CALL_EXPR_ARG (exp
, 4);
11793 if (!validate_arg (fmt
, POINTER_TYPE
))
11796 if (! host_integerp (size
, 1))
11799 if (! integer_all_onesp (size
))
11801 if (! host_integerp (len
, 1))
11803 /* If LEN is not constant, try MAXLEN too.
11804 For MAXLEN only allow optimizing into non-_ocs function
11805 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
11806 if (maxlen
== NULL_TREE
|| ! host_integerp (maxlen
, 1))
11812 if (tree_int_cst_lt (size
, maxlen
))
11816 if (!init_target_chars ())
11819 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
11820 or if format doesn't contain % chars or is "%s". */
11821 if (! integer_zerop (flag
))
11823 fmt_str
= c_getstr (fmt
);
11824 if (fmt_str
== NULL
)
11826 if (strchr (fmt_str
, target_percent
) != NULL
11827 && strcmp (fmt_str
, target_percent_s
))
11831 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
11833 fn
= built_in_decls
[fcode
== BUILT_IN_VSNPRINTF_CHK
11834 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
];
11838 return rewrite_call_expr (exp
, 5, fn
, 3, dest
, len
, fmt
);
11841 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
11842 FMT and ARG are the arguments to the call; we don't fold cases with
11843 more than 2 arguments, and ARG may be null if this is a 1-argument case.
11845 Return NULL_TREE if no simplification was possible, otherwise return the
11846 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11847 code of the function to be simplified. */
11850 fold_builtin_printf (tree fndecl
, tree fmt
, tree arg
, bool ignore
,
11851 enum built_in_function fcode
)
11853 tree fn_putchar
, fn_puts
, newarg
, call
= NULL_TREE
;
11854 const char *fmt_str
= NULL
;
11856 /* If the return value is used, don't do the transformation. */
11860 /* Verify the required arguments in the original call. */
11861 if (!validate_arg (fmt
, POINTER_TYPE
))
11864 /* Check whether the format is a literal string constant. */
11865 fmt_str
= c_getstr (fmt
);
11866 if (fmt_str
== NULL
)
11869 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
11871 /* If we're using an unlocked function, assume the other
11872 unlocked functions exist explicitly. */
11873 fn_putchar
= built_in_decls
[BUILT_IN_PUTCHAR_UNLOCKED
];
11874 fn_puts
= built_in_decls
[BUILT_IN_PUTS_UNLOCKED
];
11878 fn_putchar
= implicit_built_in_decls
[BUILT_IN_PUTCHAR
];
11879 fn_puts
= implicit_built_in_decls
[BUILT_IN_PUTS
];
11882 if (!init_target_chars ())
11885 if (strcmp (fmt_str
, target_percent_s
) == 0
11886 || strchr (fmt_str
, target_percent
) == NULL
)
11890 if (strcmp (fmt_str
, target_percent_s
) == 0)
11892 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
11895 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
11898 str
= c_getstr (arg
);
11904 /* The format specifier doesn't contain any '%' characters. */
11905 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
11911 /* If the string was "", printf does nothing. */
11912 if (str
[0] == '\0')
11913 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
11915 /* If the string has length of 1, call putchar. */
11916 if (str
[1] == '\0')
11918 /* Given printf("c"), (where c is any one character,)
11919 convert "c"[0] to an int and pass that to the replacement
11921 newarg
= build_int_cst (NULL_TREE
, str
[0]);
11923 call
= build_call_expr (fn_putchar
, 1, newarg
);
11927 /* If the string was "string\n", call puts("string"). */
11928 size_t len
= strlen (str
);
11929 if ((unsigned char)str
[len
- 1] == target_newline
)
11931 /* Create a NUL-terminated string that's one char shorter
11932 than the original, stripping off the trailing '\n'. */
11933 char *newstr
= alloca (len
);
11934 memcpy (newstr
, str
, len
- 1);
11935 newstr
[len
- 1] = 0;
11937 newarg
= build_string_literal (len
, newstr
);
11939 call
= build_call_expr (fn_puts
, 1, newarg
);
11942 /* We'd like to arrange to call fputs(string,stdout) here,
11943 but we need stdout and don't have a way to get it yet. */
11948 /* The other optimizations can be done only on the non-va_list variants. */
11949 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
11952 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
11953 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
11955 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
11958 call
= build_call_expr (fn_puts
, 1, arg
);
11961 /* If the format specifier was "%c", call __builtin_putchar(arg). */
11962 else if (strcmp (fmt_str
, target_percent_c
) == 0)
11964 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
11967 call
= build_call_expr (fn_putchar
, 1, arg
);
11973 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
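/* A standalone sketch (separate test program) of the printf rewrites above
   when the return value is unused.  */

#include <stdio.h>

int
main (void)
{
  printf ("x");             /* -> putchar ('x')   */
  printf ("hello\n");       /* -> puts ("hello")  */
  printf ("%s\n", "world"); /* -> puts ("world")  */
  printf ("%c", '!');       /* -> putchar ('!')   */
  printf ("\n");
  return 0;
}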
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
11977 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
11978 more than 3 arguments, and ARG may be null in the 2-argument case.
11980 Return NULL_TREE if no simplification was possible, otherwise return the
11981 simplified form of the call as a tree. FCODE is the BUILT_IN_*
11982 code of the function to be simplified. */
11985 fold_builtin_fprintf (tree fndecl
, tree fp
, tree fmt
, tree arg
, bool ignore
,
11986 enum built_in_function fcode
)
11988 tree fn_fputc
, fn_fputs
, call
= NULL_TREE
;
11989 const char *fmt_str
= NULL
;
11991 /* If the return value is used, don't do the transformation. */
11995 /* Verify the required arguments in the original call. */
11996 if (!validate_arg (fp
, POINTER_TYPE
))
11998 if (!validate_arg (fmt
, POINTER_TYPE
))
12001 /* Check whether the format is a literal string constant. */
12002 fmt_str
= c_getstr (fmt
);
12003 if (fmt_str
== NULL
)
12006 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
12008 /* If we're using an unlocked function, assume the other
12009 unlocked functions exist explicitly. */
12010 fn_fputc
= built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
];
12011 fn_fputs
= built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
];
12015 fn_fputc
= implicit_built_in_decls
[BUILT_IN_FPUTC
];
12016 fn_fputs
= implicit_built_in_decls
[BUILT_IN_FPUTS
];
12019 if (!init_target_chars ())
12022 /* If the format doesn't contain % args or %%, use strcpy. */
12023 if (strchr (fmt_str
, target_percent
) == NULL
)
12025 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
12029 /* If the format specifier was "", fprintf does nothing. */
12030 if (fmt_str
[0] == '\0')
12032 /* If FP has side-effects, just wait until gimplification is
12034 if (TREE_SIDE_EFFECTS (fp
))
12037 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl
)), 0);
12040 /* When "string" doesn't contain %, replace all cases of
12041 fprintf (fp, string) with fputs (string, fp). The fputs
12042 builtin will take care of special cases like length == 1. */
12044 call
= build_call_expr (fn_fputs
, 2, fmt
, fp
);
12047 /* The other optimizations can be done only on the non-va_list variants. */
12048 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
12051 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
12052 else if (strcmp (fmt_str
, target_percent_s
) == 0)
12054 if (!arg
|| !validate_arg (arg
, POINTER_TYPE
))
12057 call
= build_call_expr (fn_fputs
, 2, arg
, fp
);
12060 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
12061 else if (strcmp (fmt_str
, target_percent_c
) == 0)
12063 if (!arg
|| !validate_arg (arg
, INTEGER_TYPE
))
12066 call
= build_call_expr (fn_fputc
, 2, arg
, fp
);
12071 return fold_convert (TREE_TYPE (TREE_TYPE (fndecl
)), call
);
12074 /* Initialize format string characters in the target charset. */
12077 init_target_chars (void)
12082 target_newline
= lang_hooks
.to_target_charset ('\n');
12083 target_percent
= lang_hooks
.to_target_charset ('%');
12084 target_c
= lang_hooks
.to_target_charset ('c');
12085 target_s
= lang_hooks
.to_target_charset ('s');
12086 if (target_newline
== 0 || target_percent
== 0 || target_c
== 0
12090 target_percent_c
[0] = target_percent
;
12091 target_percent_c
[1] = target_c
;
12092 target_percent_c
[2] = '\0';
12094 target_percent_s
[0] = target_percent
;
12095 target_percent_s
[1] = target_s
;
12096 target_percent_s
[2] = '\0';
12098 target_percent_s_newline
[0] = target_percent
;
12099 target_percent_s_newline
[1] = target_s
;
12100 target_percent_s_newline
[2] = target_newline
;
12101 target_percent_s_newline
[3] = '\0';
12108 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
12109 and no overflow/underflow occurred. INEXACT is true if M was not
12110 exactly calculated. TYPE is the tree type for the result. This
12111 function assumes that you cleared the MPFR flags and then
12112 calculated M to see if anything subsequently set a flag prior to
12113 entering this function. Return NULL_TREE if any checks fail. */
12116 do_mpfr_ckconv (mpfr_srcptr m
, tree type
, int inexact
)
12118 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
12119 overflow/underflow occurred. If -frounding-math, proceed iff the
12120 result of calling FUNC was exact. */
12121 if (mpfr_number_p (m
) && !mpfr_overflow_p () && !mpfr_underflow_p ()
12122 && (!flag_rounding_math
|| !inexact
))
12124 REAL_VALUE_TYPE rr
;
12126 real_from_mpfr (&rr
, m
);
12127 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
12128 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
12129 but the mpft_t is not, then we underflowed in the
12131 if (!real_isnan (&rr
) && !real_isinf (&rr
)
12132 && (rr
.cl
== rvc_zero
) == (mpfr_zero_p (m
) != 0))
12134 REAL_VALUE_TYPE rmode
;
12136 real_convert (&rmode
, TYPE_MODE (type
), &rr
);
12137 /* Proceed iff the specified mode can hold the value. */
12138 if (real_identical (&rmode
, &rr
))
12139 return build_real (type
, rmode
);
12145 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
12146 FUNC on it and return the resulting value as a tree with type TYPE.
12147 If MIN and/or MAX are not NULL, then the supplied ARG must be
12148 within those bounds. If INCLUSIVE is true, then MIN/MAX are
12149 acceptable values, otherwise they are not. The mpfr precision is
12150 set to the precision of TYPE. We assume that function FUNC returns
12151 zero if the result could be calculated exactly within the requested
12155 do_mpfr_arg1 (tree arg
, tree type
, int (*func
)(mpfr_ptr
, mpfr_srcptr
, mp_rnd_t
),
12156 const REAL_VALUE_TYPE
*min
, const REAL_VALUE_TYPE
*max
,
12159 tree result
= NULL_TREE
;
12163 /* To proceed, MPFR must exactly represent the target floating point
12164 format, which only happens when the target base equals two. */
12165 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12166 && TREE_CODE (arg
) == REAL_CST
&& !TREE_OVERFLOW (arg
))
12168 const REAL_VALUE_TYPE
*const ra
= &TREE_REAL_CST (arg
);
12170 if (!real_isnan (ra
) && !real_isinf (ra
)
12171 && (!min
|| real_compare (inclusive
? GE_EXPR
: GT_EXPR
, ra
, min
))
12172 && (!max
|| real_compare (inclusive
? LE_EXPR
: LT_EXPR
, ra
, max
)))
12174 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12178 mpfr_init2 (m
, prec
);
12179 mpfr_from_real (m
, ra
);
12180 mpfr_clear_flags ();
12181 inexact
= func (m
, m
, GMP_RNDN
);
12182 result
= do_mpfr_ckconv (m
, type
, inexact
);
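/* A standalone sketch (separate program, link with -lmpfr -lgmp) of the
   do_mpfr_arg1 recipe above: evaluate the function at the precision of the
   target type, then accept the result only if it is a normal number and no
   overflow or underflow was flagged (and, under -frounding-math, only if it
   was exact).  */

#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  const int prec = 53;   /* precision of IEEE double, for illustration */
  mpfr_t m;
  int inexact;

  mpfr_init2 (m, prec);
  mpfr_set_d (m, 1.0, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_sin (m, m, GMP_RNDN);
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ())
    printf ("sin(1.0) = %.17g (inexact = %d)\n",
            mpfr_get_d (m, GMP_RNDN), inexact);
  mpfr_clear (m);
  return 0;
}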
12190 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
12191 FUNC on it and return the resulting value as a tree with type TYPE.
12192 The mpfr precision is set to the precision of TYPE. We assume that
12193 function FUNC returns zero if the result could be calculated
12194 exactly within the requested precision. */
12197 do_mpfr_arg2 (tree arg1
, tree arg2
, tree type
,
12198 int (*func
)(mpfr_ptr
, mpfr_srcptr
, mpfr_srcptr
, mp_rnd_t
))
12200 tree result
= NULL_TREE
;
12205 /* To proceed, MPFR must exactly represent the target floating point
12206 format, which only happens when the target base equals two. */
12207 if (REAL_MODE_FORMAT (TYPE_MODE (type
))->b
== 2
12208 && TREE_CODE (arg1
) == REAL_CST
&& !TREE_OVERFLOW (arg1
)
12209 && TREE_CODE (arg2
) == REAL_CST
&& !TREE_OVERFLOW (arg2
))
12211 const REAL_VALUE_TYPE
*const ra1
= &TREE_REAL_CST (arg1
);
12212 const REAL_VALUE_TYPE
*const ra2
= &TREE_REAL_CST (arg2
);
12214 if (!real_isnan (ra1
) && !real_isinf (ra1
)
12215 && !real_isnan (ra2
) && !real_isinf (ra2
))
12217 const int prec
= REAL_MODE_FORMAT (TYPE_MODE (type
))->p
;
12221 mpfr_inits2 (prec
, m1
, m2
, NULL
);
12222 mpfr_from_real (m1
, ra1
);
12223 mpfr_from_real (m2
, ra2
);
12224 mpfr_clear_flags ();
12225 inexact
= func (m1
, m1
, m2
, GMP_RNDN
);
12226 result
= do_mpfr_ckconv (m1
, type
, inexact
);
12227 mpfr_clears (m1
, m2
, NULL
);
/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
   mpfr function FUNC on them and return the resulting value as a tree with
   type TYPE.  The mpfr precision is set to the precision of TYPE.  We
   assume that function FUNC returns zero if the result could be
   calculated exactly within the requested precision.  */

static tree
do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
	      int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);
  STRIP_NOPS (arg3);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
      && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
    {
      const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
      const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
      const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);

      if (!real_isnan (ra1) && !real_isinf (ra1)
	  && !real_isnan (ra2) && !real_isinf (ra2)
	  && !real_isnan (ra3) && !real_isinf (ra3))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  int inexact;
	  mpfr_t m1, m2, m3;

	  mpfr_inits2 (prec, m1, m2, m3, NULL);
	  mpfr_from_real (m1, ra1);
	  mpfr_from_real (m2, ra2);
	  mpfr_from_real (m3, ra3);
	  mpfr_clear_flags ();
	  inexact = func (m1, m1, m2, m3, GMP_RNDN);
	  result = do_mpfr_ckconv (m1, type, inexact);
	  mpfr_clears (m1, m2, m3, NULL);
	}
    }

  return result;
}
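
/* Usage sketch (illustrative, not part of the original source): the typical
   three-operand case is fma, folded via MPFR's fused multiply-add.  */
#if 0
  result = do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
#endif
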
/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
   the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
   If ARG_SINP and ARG_COSP are NULL then the result is returned
   as a complex value.
   The type is taken from the type of ARG and is used for setting the
   precision of the calculation and results.  */

static tree
do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
{
  tree const type = TREE_TYPE (arg);
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST
      && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);

      if (!real_isnan (ra) && !real_isinf (ra))
	{
	  const int prec = REAL_MODE_FORMAT (TYPE_MODE (type))->p;
	  tree result_s, result_c;
	  int inexact;
	  mpfr_t m, ms, mc;

	  mpfr_inits2 (prec, m, ms, mc, NULL);
	  mpfr_from_real (m, ra);
	  mpfr_clear_flags ();
	  inexact = mpfr_sin_cos (ms, mc, m, GMP_RNDN);
	  result_s = do_mpfr_ckconv (ms, type, inexact);
	  result_c = do_mpfr_ckconv (mc, type, inexact);
	  mpfr_clears (m, ms, mc, NULL);
	  if (result_s && result_c)
	    {
	      /* If we are to return the result as a complex value, do so.  */
	      if (!arg_sinp && !arg_cosp)
		return build_complex (build_complex_type (type),
				      result_c, result_s);

	      /* Dereference the sin/cos pointer arguments.  */
	      arg_sinp = build_fold_indirect_ref (arg_sinp);
	      arg_cosp = build_fold_indirect_ref (arg_cosp);
	      /* Proceed if valid pointer types were passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
		  && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
		{
		  /* Set the values.  */
		  result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
					  result_s);
		  TREE_SIDE_EFFECTS (result_s) = 1;
		  result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
					  result_c);
		  TREE_SIDE_EFFECTS (result_c) = 1;
		  /* Combine the assignments into a compound expr.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_s, result_c));