/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "tree-gimple.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "typeclass.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-mudflap.h"

#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];

/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
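
/* Example: for an entry such as
     DEF_BUILTIN (BUILT_IN_MEMCPY, "memcpy", ...)
   in builtins.def, the DEF_BUILTIN macro above stringizes its first
   argument, so built_in_names[(int) BUILT_IN_MEMCPY] is the string
   "BUILT_IN_MEMCPY".  (Illustrative; the exact argument list of each
   entry lives in builtins.def.)  */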
static int get_pointer_alignment (tree, unsigned int);
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree);
static tree build_string_literal (int, const char *);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static rtx expand_builtin_setjmp (tree, rtx);
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_bcopy (tree, tree);
static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, rtx, bool);
static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static int validate_arglist (tree, ...);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_cabs (tree, tree);
static tree fold_builtin_sqrt (tree, tree);
static tree fold_builtin_cbrt (tree, tree);
static tree fold_builtin_pow (tree, tree, tree);
static tree fold_builtin_powi (tree, tree, tree);
static tree fold_builtin_sin (tree);
static tree fold_builtin_cos (tree, tree, tree);
static tree fold_builtin_tan (tree);
static tree fold_builtin_atan (tree, tree);
static tree fold_builtin_trunc (tree, tree);
static tree fold_builtin_floor (tree, tree);
static tree fold_builtin_ceil (tree, tree);
static tree fold_builtin_round (tree, tree);
static tree fold_builtin_int_roundingfn (tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memcpy (tree, tree);
static tree fold_builtin_mempcpy (tree, tree, int);
static tree fold_builtin_memmove (tree, tree);
static tree fold_builtin_strchr (tree, tree);
static tree fold_builtin_memcmp (tree);
static tree fold_builtin_strcmp (tree);
static tree fold_builtin_strncmp (tree);
static tree fold_builtin_signbit (tree, tree);
static tree fold_builtin_copysign (tree, tree, tree);
static tree fold_builtin_isascii (tree);
static tree fold_builtin_toascii (tree);
static tree fold_builtin_isdigit (tree);
static tree fold_builtin_fabs (tree, tree);
static tree fold_builtin_abs (tree, tree);
static tree fold_builtin_unordered_cmp (tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_1 (tree, tree, bool);
static tree fold_builtin_strpbrk (tree, tree);
static tree fold_builtin_strstr (tree, tree);
static tree fold_builtin_strrchr (tree, tree);
static tree fold_builtin_strcat (tree);
static tree fold_builtin_strncat (tree);
static tree fold_builtin_strspn (tree);
static tree fold_builtin_strcspn (tree);
static tree fold_builtin_sprintf (tree, int);
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (DECL_P (exp))
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (CONSTANT_CLASS_P (exp))
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
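
/* Illustrative example: for EXP == &i where `int i' is 32-bit aligned,
   get_pointer_alignment (EXP, 128) returns 32; with MAX_ALIGN == 16 it
   would return at most 16.  (Assumes a target where int has 32-bit
   alignment.)  */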
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
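
/* Illustrative examples: for the string constant "hello", c_strlen returns
   ssize_int (5); for "foo\0bar" with a known offset of 4 it returns
   ssize_int (3).  A non-constant source, or a variable offset into a string
   with an embedded NUL, yields NULL and the caller emits a real strlen
   call instead.  */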
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node,
				TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  return immed_double_const (c[0], c[1], mode);
}
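
/* Illustrative example (assuming a little-endian target and 32-bit SImode):
   c_readstr ("abcd", SImode) builds the constant 0x64636261, i.e. 'a'
   lands in the least significant byte; on a big-endian target the byte
   order is reversed.  */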
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to
   by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_ADDRESSABLE (exp) == 0
      && (TREE_CODE (exp) == PARM_DECL
	  || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem = hard_frame_pointer_rtx;
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_rtx_MEM (Pmode, tem);
      set_mem_alias_set (tem, get_frame_alias_set ());
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
  set_mem_alias_set (tem, get_frame_alias_set ());
#endif
  return tem;
}
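
/* Illustrative example: __builtin_return_address (0) expands to a load of
   the return address of the current frame, while __builtin_frame_address (1)
   follows the dynamic chain once and returns that frame's address.  */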
/* Alias set used for setjmp buffer.  */
static HOST_WIDE_INT setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is used directly by sjlj exception
   handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  current_function_calls_setjmp = 1;

  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;
}
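
/* The resulting buffer layout, in Pmode-sized slots (illustrative):
     buf[0]   frame value of the setjmp frame
     buf[1]   address of the receiver label
     buf[2].. machine-dependent stack save area (sa_mode)
   expand_builtin_longjmp below reads the slots back in the same order.  */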
/* Construct the trailing part of a __builtin_setjmp call.
   This is used directly by sjlj exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

static rtx
expand_builtin_setjmp (tree arglist, rtx target)
{
  rtx buf_addr, next_lab, cont_lab;

  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  if (target == 0 || !REG_P (target)
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

  next_lab = gen_label_rtx ();
  cont_lab = gen_label_rtx ();

  expand_builtin_setjmp_setup (buf_addr, next_lab);

  /* Set TARGET to zero and branch to the continue label.  Use emit_jump to
     ensure that pending stack adjustments are flushed.  */
  emit_move_insn (target, const0_rtx);
  emit_jump (cont_lab);

  emit_label (next_lab);

  expand_builtin_setjmp_receiver (next_lab);

  /* Set TARGET to one.  */
  emit_move_insn (target, const1_rtx);
  emit_label (cont_lab);

  /* Tell flow about the strange goings on.  Putting `next_lab' on
     `nonlocal_goto_handler_labels' indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels
    = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);

  return target;
}
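
/* The generated code is roughly equivalent to (illustrative):
     target = 0; goto cont; receiver: target = 1; cont: ...
   with the receiver label recorded in nonlocal_goto_handler_labels so that
   flow analysis knows calls may branch back to it.  */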
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree arglist)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = TREE_VALUE (arglist);
  arglist = TREE_CHAIN (arglist);
  t_save_area = TREE_VALUE (arglist);

  r_label = expand_expr (t_label, NULL_RTX, VOIDmode, 0);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_expr (t_save_area, NULL_RTX, VOIDmode, 0);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));

  current_function_has_nonlocal_goto = 1;

#if HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       gen_rtx_SCRATCH (VOIDmode))));

      emit_insn (gen_rtx_CLOBBER (VOIDmode,
				  gen_rtx_MEM (BLKmode,
					       hard_frame_pointer_rtx)));

      /* Restore frame pointer for containing function.
	 This sets the actual hard register used for the frame pointer
	 to the location of the function's incoming static chain info.
	 The non-local goto handler will then adjust it to contain the
	 proper value and reload the argument pointer, if needed.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
					      const0_rtx, REG_NOTES (insn));
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = Pmode;
  rtx stack_save;

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
#endif
#ifdef STACK_SAVEAREA_MODE
  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
#endif

  stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));

#ifdef HAVE_setjmp
  emit_insn (gen_setjmp ());
#endif

  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree arglist)
{
  tree arg0, arg1, arg2;
  rtx op0, op1, op2;

  if (!validate_arglist (arglist, POINTER_TYPE, 0))
    return;

  arg0 = TREE_VALUE (arglist);
  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  if (TREE_CHAIN (arglist))
    {
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      if (TREE_CHAIN (TREE_CHAIN (arglist)))
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      else
	arg2 = build_int_cst (NULL_TREE, 3);
    }
  else
    {
      arg1 = integer_zero_node;
      arg2 = build_int_cst (NULL_TREE, 3);
    }

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning ("invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning ("invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	   (op0,
	    insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
	  op0 = convert_memory_address (Pmode, op0);
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
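
/* Illustrative example: __builtin_prefetch (p, 0, 3) expands to a
   (prefetch p 0 3) insn on targets that provide a prefetch pattern; on
   other targets only the side effects of evaluating `p' are kept.  The
   second argument (write) must be 0 or 1 and the third (locality) 0..3.  */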
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  */

static rtx
get_memory_rtx (tree exp)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
	  || TREE_CODE (exp) == NON_LVALUE_EXPR)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    exp = TREE_OPERAND (exp, 0);
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
  else
    exp = NULL;

  /* Honor attributes derived from exp, except for the alias set
     (as builtin stringops may alias with anything) and the size
     (as stringops may access multiple array elements).  */
  if (exp)
    {
      set_mem_attributes (mem, exp, 0);
      set_mem_alias_set (mem, 0);
      set_mem_size (mem, NULL_RTX);
    }

  return mem;
}
1025 /* Built-in functions to perform an untyped call and return. */
1027 /* For each register that may be used for calling a function, this
1028 gives a mode used to copy the register's value. VOIDmode indicates
1029 the register is not used for calling a function. If the machine
1030 has register windows, this gives only the outbound registers.
1031 INCOMING_REGNO gives the corresponding inbound register. */
1032 static enum machine_mode apply_args_mode
[FIRST_PSEUDO_REGISTER
];
1034 /* For each register that may be used for returning values, this gives
1035 a mode used to copy the register's value. VOIDmode indicates the
1036 register is not used for returning values. If the machine has
1037 register windows, this gives only the outbound registers.
1038 INCOMING_REGNO gives the corresponding inbound register. */
1039 static enum machine_mode apply_result_mode
[FIRST_PSEUDO_REGISTER
];
1041 /* For each register that may be used for calling a function, this
1042 gives the offset of that register into the block returned by
1043 __builtin_apply_args. 0 indicates that the register is not
1044 used for calling a function. */
1045 static int apply_args_reg_offset
[FIRST_PSEUDO_REGISTER
];
1047 /* Return the size required for the block returned by __builtin_apply_args,
1048 and initialize apply_args_mode. */
1051 apply_args_size (void)
1053 static int size
= -1;
1056 enum machine_mode mode
;
1058 /* The values computed by this function never change. */
1061 /* The first value is the incoming arg-pointer. */
1062 size
= GET_MODE_SIZE (Pmode
);
1064 /* The second value is the structure value address unless this is
1065 passed as an "invisible" first argument. */
1066 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1067 size
+= GET_MODE_SIZE (Pmode
);
1069 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1070 if (FUNCTION_ARG_REGNO_P (regno
))
1072 mode
= reg_raw_mode
[regno
];
1074 gcc_assert (mode
!= VOIDmode
);
1076 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1077 if (size
% align
!= 0)
1078 size
= CEIL (size
, align
) * align
;
1079 apply_args_reg_offset
[regno
] = size
;
1080 size
+= GET_MODE_SIZE (mode
);
1081 apply_args_mode
[regno
] = mode
;
1085 apply_args_mode
[regno
] = VOIDmode
;
1086 apply_args_reg_offset
[regno
] = 0;
1092 /* Return the size required for the block returned by __builtin_apply,
1093 and initialize apply_result_mode. */
1096 apply_result_size (void)
1098 static int size
= -1;
1100 enum machine_mode mode
;
1102 /* The values computed by this function never change. */
1107 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1108 if (FUNCTION_VALUE_REGNO_P (regno
))
1110 mode
= reg_raw_mode
[regno
];
1112 gcc_assert (mode
!= VOIDmode
);
1114 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1115 if (size
% align
!= 0)
1116 size
= CEIL (size
, align
) * align
;
1117 size
+= GET_MODE_SIZE (mode
);
1118 apply_result_mode
[regno
] = mode
;
1121 apply_result_mode
[regno
] = VOIDmode
;
1123 /* Allow targets that use untyped_call and untyped_return to override
1124 the size so that machine-specific information can be stored here. */
1125 #ifdef APPLY_RESULT_SIZE
1126 size
= APPLY_RESULT_SIZE
;
1132 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1133 /* Create a vector describing the result block RESULT. If SAVEP is true,
1134 the result block is used to save the values; otherwise it is used to
1135 restore the values. */
1138 result_vector (int savep
, rtx result
)
1140 int regno
, size
, align
, nelts
;
1141 enum machine_mode mode
;
1143 rtx
*savevec
= alloca (FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
1146 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1147 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1149 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1150 if (size
% align
!= 0)
1151 size
= CEIL (size
, align
) * align
;
1152 reg
= gen_rtx_REG (mode
, savep
? regno
: INCOMING_REGNO (regno
));
1153 mem
= adjust_address (result
, mode
, size
);
1154 savevec
[nelts
++] = (savep
1155 ? gen_rtx_SET (VOIDmode
, mem
, reg
)
1156 : gen_rtx_SET (VOIDmode
, reg
, mem
));
1157 size
+= GET_MODE_SIZE (mode
);
1159 return gen_rtx_PARALLEL (VOIDmode
, gen_rtvec_v (nelts
, savevec
));
1161 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1163 /* Save the state required to perform an untyped call with the same
1164 arguments as were passed to the current function. */
1167 expand_builtin_apply_args_1 (void)
1170 int size
, align
, regno
;
1171 enum machine_mode mode
;
1172 rtx struct_incoming_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 1);
1174 /* Create a block where the arg-pointer, structure value address,
1175 and argument registers can be saved. */
1176 registers
= assign_stack_local (BLKmode
, apply_args_size (), -1);
1178 /* Walk past the arg-pointer and structure value address. */
1179 size
= GET_MODE_SIZE (Pmode
);
1180 if (targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0))
1181 size
+= GET_MODE_SIZE (Pmode
);
1183 /* Save each register used in calling a function to the block. */
1184 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1185 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1187 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1188 if (size
% align
!= 0)
1189 size
= CEIL (size
, align
) * align
;
1191 tem
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1193 emit_move_insn (adjust_address (registers
, mode
, size
), tem
);
1194 size
+= GET_MODE_SIZE (mode
);
1197 /* Save the arg pointer to the block. */
1198 tem
= copy_to_reg (virtual_incoming_args_rtx
);
1199 #ifdef STACK_GROWS_DOWNWARD
1200 /* We need the pointer as the caller actually passed them to us, not
1201 as we might have pretended they were passed. Make sure it's a valid
1202 operand, as emit_move_insn isn't expected to handle a PLUS. */
1204 = force_operand (plus_constant (tem
, current_function_pretend_args_size
),
1207 emit_move_insn (adjust_address (registers
, Pmode
, 0), tem
);
1209 size
= GET_MODE_SIZE (Pmode
);
1211 /* Save the structure value address unless this is passed as an
1212 "invisible" first argument. */
1213 if (struct_incoming_value
)
1215 emit_move_insn (adjust_address (registers
, Pmode
, size
),
1216 copy_to_reg (struct_incoming_value
));
1217 size
+= GET_MODE_SIZE (Pmode
);
1220 /* Return the address of the block. */
1221 return copy_addr_to_reg (XEXP (registers
, 0));
1224 /* __builtin_apply_args returns block of memory allocated on
1225 the stack into which is stored the arg pointer, structure
1226 value address, static chain, and all the registers that might
1227 possibly be used in performing a function call. The code is
1228 moved to the start of the function so the incoming values are
1232 expand_builtin_apply_args (void)
1234 /* Don't do __builtin_apply_args more than once in a function.
1235 Save the result of the first call and reuse it. */
1236 if (apply_args_value
!= 0)
1237 return apply_args_value
;
1239 /* When this function is called, it means that registers must be
1240 saved on entry to this function. So we migrate the
1241 call to the first insn of this function. */
1246 temp
= expand_builtin_apply_args_1 ();
1250 apply_args_value
= temp
;
1252 /* Put the insns after the NOTE that starts the function.
1253 If this is inside a start_sequence, make the outer-level insn
1254 chain current, so the code is placed at the start of the
1256 push_topmost_sequence ();
1257 emit_insn_before (seq
, NEXT_INSN (entry_of_function ()));
1258 pop_topmost_sequence ();
1263 /* Perform an untyped call and save the state required to perform an
1264 untyped return of whatever value was returned by the given function. */
1267 expand_builtin_apply (rtx function
, rtx arguments
, rtx argsize
)
1269 int size
, align
, regno
;
1270 enum machine_mode mode
;
1271 rtx incoming_args
, result
, reg
, dest
, src
, call_insn
;
1272 rtx old_stack_level
= 0;
1273 rtx call_fusage
= 0;
1274 rtx struct_value
= targetm
.calls
.struct_value_rtx (cfun
? TREE_TYPE (cfun
->decl
) : 0, 0);
1276 arguments
= convert_memory_address (Pmode
, arguments
);
1278 /* Create a block where the return registers can be saved. */
1279 result
= assign_stack_local (BLKmode
, apply_result_size (), -1);
1281 /* Fetch the arg pointer from the ARGUMENTS block. */
1282 incoming_args
= gen_reg_rtx (Pmode
);
1283 emit_move_insn (incoming_args
, gen_rtx_MEM (Pmode
, arguments
));
1284 #ifndef STACK_GROWS_DOWNWARD
1285 incoming_args
= expand_simple_binop (Pmode
, MINUS
, incoming_args
, argsize
,
1286 incoming_args
, 0, OPTAB_LIB_WIDEN
);
1289 /* Push a new argument block and copy the arguments. Do not allow
1290 the (potential) memcpy call below to interfere with our stack
1292 do_pending_stack_adjust ();
1295 /* Save the stack with nonlocal if available. */
1296 #ifdef HAVE_save_stack_nonlocal
1297 if (HAVE_save_stack_nonlocal
)
1298 emit_stack_save (SAVE_NONLOCAL
, &old_stack_level
, NULL_RTX
);
1301 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1303 /* Allocate a block of memory onto the stack and copy the memory
1304 arguments to the outgoing arguments address. */
1305 allocate_dynamic_stack_space (argsize
, 0, BITS_PER_UNIT
);
1306 dest
= virtual_outgoing_args_rtx
;
1307 #ifndef STACK_GROWS_DOWNWARD
1308 if (GET_CODE (argsize
) == CONST_INT
)
1309 dest
= plus_constant (dest
, -INTVAL (argsize
));
1311 dest
= gen_rtx_PLUS (Pmode
, dest
, negate_rtx (Pmode
, argsize
));
1313 dest
= gen_rtx_MEM (BLKmode
, dest
);
1314 set_mem_align (dest
, PARM_BOUNDARY
);
1315 src
= gen_rtx_MEM (BLKmode
, incoming_args
);
1316 set_mem_align (src
, PARM_BOUNDARY
);
1317 emit_block_move (dest
, src
, argsize
, BLOCK_OP_NORMAL
);
1319 /* Refer to the argument block. */
1321 arguments
= gen_rtx_MEM (BLKmode
, arguments
);
1322 set_mem_align (arguments
, PARM_BOUNDARY
);
1324 /* Walk past the arg-pointer and structure value address. */
1325 size
= GET_MODE_SIZE (Pmode
);
1327 size
+= GET_MODE_SIZE (Pmode
);
1329 /* Restore each of the registers previously saved. Make USE insns
1330 for each of these registers for use in making the call. */
1331 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1332 if ((mode
= apply_args_mode
[regno
]) != VOIDmode
)
1334 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1335 if (size
% align
!= 0)
1336 size
= CEIL (size
, align
) * align
;
1337 reg
= gen_rtx_REG (mode
, regno
);
1338 emit_move_insn (reg
, adjust_address (arguments
, mode
, size
));
1339 use_reg (&call_fusage
, reg
);
1340 size
+= GET_MODE_SIZE (mode
);
1343 /* Restore the structure value address unless this is passed as an
1344 "invisible" first argument. */
1345 size
= GET_MODE_SIZE (Pmode
);
1348 rtx value
= gen_reg_rtx (Pmode
);
1349 emit_move_insn (value
, adjust_address (arguments
, Pmode
, size
));
1350 emit_move_insn (struct_value
, value
);
1351 if (REG_P (struct_value
))
1352 use_reg (&call_fusage
, struct_value
);
1353 size
+= GET_MODE_SIZE (Pmode
);
1356 /* All arguments and registers used for the call are set up by now! */
1357 function
= prepare_call_address (function
, NULL
, &call_fusage
, 0, 0);
1359 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1360 and we don't want to load it into a register as an optimization,
1361 because prepare_call_address already did it if it should be done. */
1362 if (GET_CODE (function
) != SYMBOL_REF
)
1363 function
= memory_address (FUNCTION_MODE
, function
);
1365 /* Generate the actual call instruction and save the return value. */
1366 #ifdef HAVE_untyped_call
1367 if (HAVE_untyped_call
)
1368 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE
, function
),
1369 result
, result_vector (1, result
)));
1372 #ifdef HAVE_call_value
1373 if (HAVE_call_value
)
1377 /* Locate the unique return register. It is not possible to
1378 express a call that sets more than one return register using
1379 call_value; use untyped_call for that. In fact, untyped_call
1380 only needs to save the return registers in the given block. */
1381 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1382 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1384 gcc_assert (!valreg
); /* HAVE_untyped_call required. */
1386 valreg
= gen_rtx_REG (mode
, regno
);
1389 emit_call_insn (GEN_CALL_VALUE (valreg
,
1390 gen_rtx_MEM (FUNCTION_MODE
, function
),
1391 const0_rtx
, NULL_RTX
, const0_rtx
));
1393 emit_move_insn (adjust_address (result
, GET_MODE (valreg
), 0), valreg
);
1399 /* Find the CALL insn we just emitted, and attach the register usage
1401 call_insn
= last_call_insn ();
1402 add_function_usage_to (call_insn
, call_fusage
);
1404 /* Restore the stack. */
1405 #ifdef HAVE_save_stack_nonlocal
1406 if (HAVE_save_stack_nonlocal
)
1407 emit_stack_restore (SAVE_NONLOCAL
, old_stack_level
, NULL_RTX
);
1410 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1414 /* Return the address of the result block. */
1415 result
= copy_addr_to_reg (XEXP (result
, 0));
1416 return convert_memory_address (ptr_mode
, result
);
1419 /* Perform an untyped return. */
1422 expand_builtin_return (rtx result
)
1424 int size
, align
, regno
;
1425 enum machine_mode mode
;
1427 rtx call_fusage
= 0;
1429 result
= convert_memory_address (Pmode
, result
);
1431 apply_result_size ();
1432 result
= gen_rtx_MEM (BLKmode
, result
);
1434 #ifdef HAVE_untyped_return
1435 if (HAVE_untyped_return
)
1437 emit_jump_insn (gen_untyped_return (result
, result_vector (0, result
)));
1443 /* Restore the return value and note that each value is used. */
1445 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
1446 if ((mode
= apply_result_mode
[regno
]) != VOIDmode
)
1448 align
= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
;
1449 if (size
% align
!= 0)
1450 size
= CEIL (size
, align
) * align
;
1451 reg
= gen_rtx_REG (mode
, INCOMING_REGNO (regno
));
1452 emit_move_insn (reg
, adjust_address (result
, mode
, size
));
1454 push_to_sequence (call_fusage
);
1455 emit_insn (gen_rtx_USE (VOIDmode
, reg
));
1456 call_fusage
= get_insns ();
1458 size
+= GET_MODE_SIZE (mode
);
1461 /* Put the USE insns before the return. */
1462 emit_insn (call_fusage
);
1464 /* Return whatever values was restored by jumping directly to the end
1466 expand_naked_return ();
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case CHAR_TYPE:	   return char_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}

/* Expand a call to __builtin_classify_type with arguments found in
   ARGLIST.  */

static rtx
expand_builtin_classify_type (tree arglist)
{
  if (arglist != 0)
    return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
  return GEN_INT (no_type_class);
}
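
/* Illustrative example: __builtin_classify_type (3.14) yields the constant
   real_type_class, and __builtin_classify_type ("abc") yields
   pointer_type_class after the usual array-to-pointer decay.  */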
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
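
/* For instance, CASE_MATHFN (BUILT_IN_SQRT) expands to:
     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;
   covering the double, float and long double variants in one entry.  */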
1518 /* Return mathematic function equivalent to FN but operating directly
1519 on TYPE, if available. If we can't do the conversion, return zero. */
1521 mathfn_built_in (tree type
, enum built_in_function fn
)
1523 enum built_in_function fcode
, fcodef
, fcodel
;
1527 CASE_MATHFN (BUILT_IN_ACOS
)
1528 CASE_MATHFN (BUILT_IN_ACOSH
)
1529 CASE_MATHFN (BUILT_IN_ASIN
)
1530 CASE_MATHFN (BUILT_IN_ASINH
)
1531 CASE_MATHFN (BUILT_IN_ATAN
)
1532 CASE_MATHFN (BUILT_IN_ATAN2
)
1533 CASE_MATHFN (BUILT_IN_ATANH
)
1534 CASE_MATHFN (BUILT_IN_CBRT
)
1535 CASE_MATHFN (BUILT_IN_CEIL
)
1536 CASE_MATHFN (BUILT_IN_COPYSIGN
)
1537 CASE_MATHFN (BUILT_IN_COS
)
1538 CASE_MATHFN (BUILT_IN_COSH
)
1539 CASE_MATHFN (BUILT_IN_DREM
)
1540 CASE_MATHFN (BUILT_IN_ERF
)
1541 CASE_MATHFN (BUILT_IN_ERFC
)
1542 CASE_MATHFN (BUILT_IN_EXP
)
1543 CASE_MATHFN (BUILT_IN_EXP10
)
1544 CASE_MATHFN (BUILT_IN_EXP2
)
1545 CASE_MATHFN (BUILT_IN_EXPM1
)
1546 CASE_MATHFN (BUILT_IN_FABS
)
1547 CASE_MATHFN (BUILT_IN_FDIM
)
1548 CASE_MATHFN (BUILT_IN_FLOOR
)
1549 CASE_MATHFN (BUILT_IN_FMA
)
1550 CASE_MATHFN (BUILT_IN_FMAX
)
1551 CASE_MATHFN (BUILT_IN_FMIN
)
1552 CASE_MATHFN (BUILT_IN_FMOD
)
1553 CASE_MATHFN (BUILT_IN_FREXP
)
1554 CASE_MATHFN (BUILT_IN_GAMMA
)
1555 CASE_MATHFN (BUILT_IN_HUGE_VAL
)
1556 CASE_MATHFN (BUILT_IN_HYPOT
)
1557 CASE_MATHFN (BUILT_IN_ILOGB
)
1558 CASE_MATHFN (BUILT_IN_INF
)
1559 CASE_MATHFN (BUILT_IN_J0
)
1560 CASE_MATHFN (BUILT_IN_J1
)
1561 CASE_MATHFN (BUILT_IN_JN
)
1562 CASE_MATHFN (BUILT_IN_LCEIL
)
1563 CASE_MATHFN (BUILT_IN_LDEXP
)
1564 CASE_MATHFN (BUILT_IN_LFLOOR
)
1565 CASE_MATHFN (BUILT_IN_LGAMMA
)
1566 CASE_MATHFN (BUILT_IN_LLCEIL
)
1567 CASE_MATHFN (BUILT_IN_LLFLOOR
)
1568 CASE_MATHFN (BUILT_IN_LLRINT
)
1569 CASE_MATHFN (BUILT_IN_LLROUND
)
1570 CASE_MATHFN (BUILT_IN_LOG
)
1571 CASE_MATHFN (BUILT_IN_LOG10
)
1572 CASE_MATHFN (BUILT_IN_LOG1P
)
1573 CASE_MATHFN (BUILT_IN_LOG2
)
1574 CASE_MATHFN (BUILT_IN_LOGB
)
1575 CASE_MATHFN (BUILT_IN_LRINT
)
1576 CASE_MATHFN (BUILT_IN_LROUND
)
1577 CASE_MATHFN (BUILT_IN_MODF
)
1578 CASE_MATHFN (BUILT_IN_NAN
)
1579 CASE_MATHFN (BUILT_IN_NANS
)
1580 CASE_MATHFN (BUILT_IN_NEARBYINT
)
1581 CASE_MATHFN (BUILT_IN_NEXTAFTER
)
1582 CASE_MATHFN (BUILT_IN_NEXTTOWARD
)
1583 CASE_MATHFN (BUILT_IN_POW
)
1584 CASE_MATHFN (BUILT_IN_POWI
)
1585 CASE_MATHFN (BUILT_IN_POW10
)
1586 CASE_MATHFN (BUILT_IN_REMAINDER
)
1587 CASE_MATHFN (BUILT_IN_REMQUO
)
1588 CASE_MATHFN (BUILT_IN_RINT
)
1589 CASE_MATHFN (BUILT_IN_ROUND
)
1590 CASE_MATHFN (BUILT_IN_SCALB
)
1591 CASE_MATHFN (BUILT_IN_SCALBLN
)
1592 CASE_MATHFN (BUILT_IN_SCALBN
)
1593 CASE_MATHFN (BUILT_IN_SIGNIFICAND
)
1594 CASE_MATHFN (BUILT_IN_SIN
)
1595 CASE_MATHFN (BUILT_IN_SINCOS
)
1596 CASE_MATHFN (BUILT_IN_SINH
)
1597 CASE_MATHFN (BUILT_IN_SQRT
)
1598 CASE_MATHFN (BUILT_IN_TAN
)
1599 CASE_MATHFN (BUILT_IN_TANH
)
1600 CASE_MATHFN (BUILT_IN_TGAMMA
)
1601 CASE_MATHFN (BUILT_IN_TRUNC
)
1602 CASE_MATHFN (BUILT_IN_Y0
)
1603 CASE_MATHFN (BUILT_IN_Y1
)
1604 CASE_MATHFN (BUILT_IN_YN
)
1610 if (TYPE_MAIN_VARIANT (type
) == double_type_node
)
1611 return implicit_built_in_decls
[fcode
];
1612 else if (TYPE_MAIN_VARIANT (type
) == float_type_node
)
1613 return implicit_built_in_decls
[fcodef
];
1614 else if (TYPE_MAIN_VARIANT (type
) == long_double_type_node
)
1615 return implicit_built_in_decls
[fcodel
];
1620 /* If errno must be maintained, expand the RTL to check if the result,
1621 TARGET, of a built-in function call, EXP, is NaN, and if so set
1625 expand_errno_check (tree exp
, rtx target
)
1627 rtx lab
= gen_label_rtx ();
1629 /* Test the result; if it is NaN, set errno=EDOM because
1630 the argument was not in the domain. */
1631 emit_cmp_and_jump_insns (target
, target
, EQ
, 0, GET_MODE (target
),
1635 /* If this built-in doesn't throw an exception, set errno directly. */
1636 if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0)))
1638 #ifdef GEN_ERRNO_RTX
1639 rtx errno_rtx
= GEN_ERRNO_RTX
;
1642 = gen_rtx_MEM (word_mode
, gen_rtx_SYMBOL_REF (Pmode
, "errno"));
1644 emit_move_insn (errno_rtx
, GEN_INT (TARGET_EDOM
));
1650 /* We can't set errno=EDOM directly; let the library call do it.
1651 Pop the arguments right away in case the call gets deleted. */
1653 expand_call (exp
, target
, 0);
1659 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1660 Return 0 if a normal call should be emitted rather than expanding the
1661 function in-line. EXP is the expression that is a call to the builtin
1662 function; if convenient, the result should be placed in TARGET.
1663 SUBTARGET may be used as the target for computing one of EXP's operands. */
1666 expand_builtin_mathfn (tree exp
, rtx target
, rtx subtarget
)
1668 optab builtin_optab
;
1669 rtx op0
, insns
, before_call
;
1670 tree fndecl
= get_callee_fndecl (exp
);
1671 tree arglist
= TREE_OPERAND (exp
, 1);
1672 enum machine_mode mode
;
1673 bool errno_set
= false;
1676 if (!validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
1679 arg
= TREE_VALUE (arglist
);
1681 switch (DECL_FUNCTION_CODE (fndecl
))
1684 case BUILT_IN_SQRTF
:
1685 case BUILT_IN_SQRTL
:
1686 errno_set
= ! tree_expr_nonnegative_p (arg
);
1687 builtin_optab
= sqrt_optab
;
1692 errno_set
= true; builtin_optab
= exp_optab
; break;
1693 case BUILT_IN_EXP10
:
1694 case BUILT_IN_EXP10F
:
1695 case BUILT_IN_EXP10L
:
1696 case BUILT_IN_POW10
:
1697 case BUILT_IN_POW10F
:
1698 case BUILT_IN_POW10L
:
1699 errno_set
= true; builtin_optab
= exp10_optab
; break;
1701 case BUILT_IN_EXP2F
:
1702 case BUILT_IN_EXP2L
:
1703 errno_set
= true; builtin_optab
= exp2_optab
; break;
1704 case BUILT_IN_EXPM1
:
1705 case BUILT_IN_EXPM1F
:
1706 case BUILT_IN_EXPM1L
:
1707 errno_set
= true; builtin_optab
= expm1_optab
; break;
1709 case BUILT_IN_LOGBF
:
1710 case BUILT_IN_LOGBL
:
1711 errno_set
= true; builtin_optab
= logb_optab
; break;
1712 case BUILT_IN_ILOGB
:
1713 case BUILT_IN_ILOGBF
:
1714 case BUILT_IN_ILOGBL
:
1715 errno_set
= true; builtin_optab
= ilogb_optab
; break;
1719 errno_set
= true; builtin_optab
= log_optab
; break;
1720 case BUILT_IN_LOG10
:
1721 case BUILT_IN_LOG10F
:
1722 case BUILT_IN_LOG10L
:
1723 errno_set
= true; builtin_optab
= log10_optab
; break;
1725 case BUILT_IN_LOG2F
:
1726 case BUILT_IN_LOG2L
:
1727 errno_set
= true; builtin_optab
= log2_optab
; break;
1728 case BUILT_IN_LOG1P
:
1729 case BUILT_IN_LOG1PF
:
1730 case BUILT_IN_LOG1PL
:
1731 errno_set
= true; builtin_optab
= log1p_optab
; break;
1733 case BUILT_IN_ASINF
:
1734 case BUILT_IN_ASINL
:
1735 builtin_optab
= asin_optab
; break;
1737 case BUILT_IN_ACOSF
:
1738 case BUILT_IN_ACOSL
:
1739 builtin_optab
= acos_optab
; break;
1743 builtin_optab
= tan_optab
; break;
1745 case BUILT_IN_ATANF
:
1746 case BUILT_IN_ATANL
:
1747 builtin_optab
= atan_optab
; break;
1748 case BUILT_IN_FLOOR
:
1749 case BUILT_IN_FLOORF
:
1750 case BUILT_IN_FLOORL
:
1751 builtin_optab
= floor_optab
; break;
1753 case BUILT_IN_CEILF
:
1754 case BUILT_IN_CEILL
:
1755 builtin_optab
= ceil_optab
; break;
1756 case BUILT_IN_TRUNC
:
1757 case BUILT_IN_TRUNCF
:
1758 case BUILT_IN_TRUNCL
:
1759 builtin_optab
= btrunc_optab
; break;
1760 case BUILT_IN_ROUND
:
1761 case BUILT_IN_ROUNDF
:
1762 case BUILT_IN_ROUNDL
:
1763 builtin_optab
= round_optab
; break;
1764 case BUILT_IN_NEARBYINT
:
1765 case BUILT_IN_NEARBYINTF
:
1766 case BUILT_IN_NEARBYINTL
:
1767 builtin_optab
= nearbyint_optab
; break;
1769 case BUILT_IN_RINTF
:
1770 case BUILT_IN_RINTL
:
1771 builtin_optab
= rint_optab
; break;
1772 case BUILT_IN_LRINT
:
1773 case BUILT_IN_LRINTF
:
1774 case BUILT_IN_LRINTL
:
1775 case BUILT_IN_LLRINT
:
1776 case BUILT_IN_LLRINTF
:
1777 case BUILT_IN_LLRINTL
:
1778 builtin_optab
= lrint_optab
; break;
1783 /* Make a suitable register to place result in. */
1784 mode = TYPE_MODE (TREE_TYPE (exp));
1786 if (! flag_errno_math || ! HONOR_NANS (mode))
1789 /* Before working hard, check whether the instruction is available. */
1790 if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1792 target = gen_reg_rtx (mode);
1794 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1795 need to expand the argument again. This way, we will not perform
1796 side-effects more than once. */
1797 narg = builtin_save_expr (arg);
1801 arglist = build_tree_list (NULL_TREE, arg);
1802 exp = build_function_call_expr (fndecl, arglist);
1805 op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1809 /* Compute into TARGET.
1810 Set TARGET to wherever the result comes back. */
1811 target = expand_unop (mode, builtin_optab, op0, target, 0);
1816 expand_errno_check (exp, target);
1818 /* Output the entire sequence. */
1819 insns = get_insns ();
1825 /* If we were unable to expand via the builtin, stop the sequence
1826 (without outputting the insns) and call the library function
1827 with the stabilized argument list. */
1831 before_call = get_last_insn ();
1833 target = expand_call (exp, target, target == const0_rtx);
1835 /* If this is a sqrt operation and we don't care about errno, try to
1836 attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1837 This allows the semantics of the libcall to be visible to the RTL optimizers. */
1839 if (builtin_optab == sqrt_optab && !errno_set)
1841 /* Search backwards through the insns emitted by expand_call looking
1842 for the instruction with the REG_RETVAL note. */
1843 rtx last = get_last_insn ();
1844 while (last != before_call)
1846 if (find_reg_note (last, REG_RETVAL, NULL))
1848 rtx note = find_reg_note (last, REG_EQUAL, NULL);
1849 /* Check that the REG_EQUAL note is an EXPR_LIST with
1850 two elements, i.e. symbol_ref(sqrt) and the operand. */
1852 && GET_CODE (note) == EXPR_LIST
1853 && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1854 && XEXP (XEXP (note, 0), 1) != NULL_RTX
1855 && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1857 rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1858 /* Check that the operand is a register with the expected mode. */
1861 && GET_MODE (operand) == mode)
1863 /* Replace the REG_EQUAL note with a SQRT rtx. */
1864 rtx equiv = gen_rtx_SQRT (mode, operand);
1865 set_unique_reg_note (last, REG_EQUAL, equiv);
1870 last = PREV_INSN (last);
1877 /* Expand a call to the builtin binary math functions (pow and atan2).
1878 Return 0 if a normal call should be emitted rather than expanding the
1879 function in-line. EXP is the expression that is a call to the builtin
1880 function; if convenient, the result should be placed in TARGET.
1881 SUBTARGET may be used as the target for computing one of EXP's operands. */
1885 expand_builtin_mathfn_2 (tree exp
, rtx target
, rtx subtarget
)
1887 optab builtin_optab
;
1888 rtx op0
, op1
, insns
;
1889 int op1_type
= REAL_TYPE
;
1890 tree fndecl
= get_callee_fndecl (exp
);
1891 tree arglist
= TREE_OPERAND (exp
, 1);
1892 tree arg0
, arg1
, temp
, narg
;
1893 enum machine_mode mode
;
1894 bool errno_set
= true;
1897 if ((DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_LDEXP
)
1898 || (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_LDEXPF
)
1899 || (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_LDEXPL
))
1900 op1_type
= INTEGER_TYPE
;
1902 if (!validate_arglist (arglist
, REAL_TYPE
, op1_type
, VOID_TYPE
))
1905 arg0
= TREE_VALUE (arglist
);
1906 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
1908 switch (DECL_FUNCTION_CODE (fndecl
))
1913 builtin_optab
= pow_optab
; break;
1914 case BUILT_IN_ATAN2
:
1915 case BUILT_IN_ATAN2F
:
1916 case BUILT_IN_ATAN2L
:
1917 builtin_optab
= atan2_optab
; break;
1918 case BUILT_IN_LDEXP
:
1919 case BUILT_IN_LDEXPF
:
1920 case BUILT_IN_LDEXPL
:
1921 builtin_optab
= ldexp_optab
; break;
1923 case BUILT_IN_FMODF
:
1924 case BUILT_IN_FMODL
:
1925 builtin_optab
= fmod_optab
; break;
1927 case BUILT_IN_DREMF
:
1928 case BUILT_IN_DREML
:
1929 builtin_optab
= drem_optab
; break;
1934 /* Make a suitable register to place result in. */
1935 mode
= TYPE_MODE (TREE_TYPE (exp
));
1937 /* Before working hard, check whether the instruction is available. */
1938 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
1941 target
= gen_reg_rtx (mode
);
1943 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
1946 /* Always stabilize the argument list. */
1947 narg
= builtin_save_expr (arg1
);
1951 temp
= build_tree_list (NULL_TREE
, narg
);
1955 temp
= TREE_CHAIN (arglist
);
1957 narg
= builtin_save_expr (arg0
);
1961 arglist
= tree_cons (NULL_TREE
, narg
, temp
);
1965 arglist
= tree_cons (NULL_TREE
, arg0
, temp
);
1968 exp
= build_function_call_expr (fndecl
, arglist
);
1970 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
1971 op1
= expand_expr (arg1
, 0, VOIDmode
, 0);
1975 /* Compute into TARGET.
1976 Set TARGET to wherever the result comes back. */
1977 target
= expand_binop (mode
, builtin_optab
, op0
, op1
,
1978 target
, 0, OPTAB_DIRECT
);
1980 /* If we were unable to expand via the builtin, stop the sequence
1981 (without outputting the insns) and call the library function
1982 with the stabilized argument list. */
1986 return expand_call (exp
, target
, target
== const0_rtx
);
1990 expand_errno_check (exp
, target
);
1992 /* Output the entire sequence. */
1993 insns
= get_insns ();
2000 /* Expand a call to the builtin sin and cos math functions.
2001 Return 0 if a normal call should be emitted rather than expanding the
2002 function in-line. EXP is the expression that is a call to the builtin
2003 function; if convenient, the result should be placed in TARGET.
2004 SUBTARGET may be used as the target for computing one of EXP's operands. */
2008 expand_builtin_mathfn_3 (tree exp
, rtx target
, rtx subtarget
)
2010 optab builtin_optab
;
2012 tree fndecl
= get_callee_fndecl (exp
);
2013 tree arglist
= TREE_OPERAND (exp
, 1);
2014 enum machine_mode mode
;
2015 bool errno_set
= false;
2018 if (!validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
2021 arg
= TREE_VALUE (arglist
);
2023 switch (DECL_FUNCTION_CODE (fndecl
))
2031 builtin_optab
= sincos_optab
; break;
2036 /* Make a suitable register to place result in. */
2037 mode
= TYPE_MODE (TREE_TYPE (exp
));
2039 if (! flag_errno_math
|| ! HONOR_NANS (mode
))
2042 /* Check if the sincos insn is available; otherwise fall back
2043 to the sin or cos insn. */
2044 if (builtin_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
) {
2045 switch (DECL_FUNCTION_CODE (fndecl
))
2050 builtin_optab
= sin_optab
; break;
2054 builtin_optab
= cos_optab
; break;
2060 /* Before working hard, check whether the instruction is available. */
2061 if (builtin_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2063 target
= gen_reg_rtx (mode
);
2065 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2066 need to expand the argument again. This way, we will not perform
2067 side-effects more than once. */
2068 narg
= save_expr (arg
);
2072 arglist
= build_tree_list (NULL_TREE
, arg
);
2073 exp
= build_function_call_expr (fndecl
, arglist
);
2076 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2080 /* Compute into TARGET.
2081 Set TARGET to wherever the result comes back. */
2082 if (builtin_optab
== sincos_optab
)
2086 switch (DECL_FUNCTION_CODE (fndecl
))
2091 result
= expand_twoval_unop (builtin_optab
, op0
, 0, target
, 0);
2096 result
= expand_twoval_unop (builtin_optab
, op0
, target
, 0, 0);
2101 gcc_assert (result
);
2105 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2111 expand_errno_check (exp
, target
);
2113 /* Output the entire sequence. */
2114 insns
= get_insns ();
2120 /* If we were unable to expand via the builtin, stop the sequence
2121 (without outputting the insns) and call the library function
2122 with the stabilized argument list. */
2126 target
= expand_call (exp
, target
, target
== const0_rtx
);
2131 /* Expand a call to one of the builtin rounding functions (lfloor).
2132 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2133 EXP is the expression that is a call to the builtin function;
2134 if convenient, the result should be placed in TARGET. SUBTARGET may
2135 be used as the target for computing one of EXP's operands. */
2138 expand_builtin_int_roundingfn (tree exp
, rtx target
, rtx subtarget
)
2140 optab builtin_optab
;
2141 rtx op0
, insns
, tmp
;
2142 tree fndecl
= get_callee_fndecl (exp
);
2143 tree arglist
= TREE_OPERAND (exp
, 1);
2144 enum built_in_function fallback_fn
;
2145 tree fallback_fndecl
;
2146 enum machine_mode mode
;
2149 if (!validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
2152 arg
= TREE_VALUE (arglist
);
2154 switch (DECL_FUNCTION_CODE (fndecl
))
2156 case BUILT_IN_LCEIL
:
2157 case BUILT_IN_LCEILF
:
2158 case BUILT_IN_LCEILL
:
2159 case BUILT_IN_LLCEIL
:
2160 case BUILT_IN_LLCEILF
:
2161 case BUILT_IN_LLCEILL
:
2162 builtin_optab
= lceil_optab
;
2163 fallback_fn
= BUILT_IN_CEIL
;
2166 case BUILT_IN_LFLOOR
:
2167 case BUILT_IN_LFLOORF
:
2168 case BUILT_IN_LFLOORL
:
2169 case BUILT_IN_LLFLOOR
:
2170 case BUILT_IN_LLFLOORF
:
2171 case BUILT_IN_LLFLOORL
:
2172 builtin_optab
= lfloor_optab
;
2173 fallback_fn
= BUILT_IN_FLOOR
;
2180 /* Make a suitable register to place result in. */
2181 mode
= TYPE_MODE (TREE_TYPE (exp
));
2183 /* Before working hard, check whether the instruction is available. */
2184 if (builtin_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
)
2186 target
= gen_reg_rtx (mode
);
2188 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2189 need to expand the argument again. This way, we will not perform
2190 side-effects more than once. */
2191 narg
= builtin_save_expr (arg
);
2195 arglist
= build_tree_list (NULL_TREE
, arg
);
2196 exp
= build_function_call_expr (fndecl
, arglist
);
2199 op0
= expand_expr (arg
, subtarget
, VOIDmode
, 0);
2203 /* Compute into TARGET.
2204 Set TARGET to wherever the result comes back. */
2205 target
= expand_unop (mode
, builtin_optab
, op0
, target
, 0);
2209 /* Output the entire sequence. */
2210 insns
= get_insns ();
2216 /* If we were unable to expand via the builtin, stop the sequence
2217 (without outputting the insns). */
2221 /* Fall back to floating point rounding optab. */
2222 fallback_fndecl
= mathfn_built_in (TREE_TYPE (arg
), fallback_fn
);
2223 /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
2224 ??? Perhaps convert (int)floorf(x) into (int)floor((double)x). */
2225 gcc_assert (fallback_fndecl
!= NULL_TREE
);
2226 exp
= build_function_call_expr (fallback_fndecl
, arglist
);
2228 tmp
= expand_builtin_mathfn (exp
, NULL_RTX
, NULL_RTX
);
2230 /* Truncate the result of the floating point optab to an integer
2231 via expand_fix (). */
2232 target
= gen_reg_rtx (mode
);
2233 expand_fix (target
, tmp
, 0);
2238 /* To evaluate powi(x,n), the floating point value x raised to the
2239 constant integer exponent n, we use a hybrid algorithm that
2240 combines the "window method" with look-up tables. For an
2241 introduction to exponentiation algorithms and "addition chains",
2242 see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2243 "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2244 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2245 Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */
2247 /* Provide a default value for POWI_MAX_MULTS, the maximum number of
2248 multiplications to inline before calling the system library's pow
2249 function. powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2250 so this default never requires calling pow, powf or powl. */
2252 #ifndef POWI_MAX_MULTS
2253 #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2)
2256 /* The size of the "optimal power tree" lookup table. All
2257 exponents less than this value are simply looked up in the
2258 powi_table below. This threshold is also used to size the
2259 cache of pseudo registers that hold intermediate results. */
2260 #define POWI_TABLE_SIZE 256
2262 /* The size, in bits, of the window used in the "window method"
2263 exponentiation algorithm. This is equivalent to a radix of
2264 (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */
2265 #define POWI_WINDOW_SIZE 3
2267 /* The following table is an efficient representation of an
2268 "optimal power tree". For each value, i, the corresponding
2269 value, j, in the table states that an optimal evaluation
2270 sequence for calculating pow(x,i) can be found by evaluating
2271 pow(x,j)*pow(x,i-j). An optimal power tree for the first
2272 100 integers is given in Knuth's "Seminumerical algorithms". */
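/* Worked example (derived from the first row of the table below):
   powi_table[7] is 4, so x**7 is evaluated as x**4 * x**3; powi_table[4]
   and powi_table[3] are both 2, giving x**4 = x**2 * x**2 and
   x**3 = x**2 * x, so the chain x**2, x**3, x**4, x**7 costs four
   multiplications instead of the six a naive product would need.  */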
2274 static const unsigned char powi_table[POWI_TABLE_SIZE] =
2276 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */
2277 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */
2278 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */
2279 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */
2280 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */
2281 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */
2282 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */
2283 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */
2284 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */
2285 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */
2286 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */
2287 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */
2288 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */
2289 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */
2290 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */
2291 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */
2292 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */
2293 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */
2294 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */
2295 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */
2296 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */
2297 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */
2298 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */
2299 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */
2300 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */
2301 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */
2302 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */
2303 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */
2304 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */
2305 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */
2306 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */
2307 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */
2311 /* Return the number of multiplications required to calculate
2312 powi(x,n) where n is less than POWI_TABLE_SIZE. This is a
2313 subroutine of powi_cost. CACHE is an array indicating
2314 which exponents have already been calculated. */
2317 powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2319 /* If we've already calculated this exponent, then this evaluation
2320 doesn't require any additional multiplications. */
2325 return powi_lookup_cost (n - powi_table[n], cache)
2326 + powi_lookup_cost (powi_table[n], cache) + 1;
2329 /* Return the number of multiplications required to calculate
2330 powi(x,n) for an arbitrary x, given the exponent N. This
2331 function needs to be kept in sync with expand_powi below. */
2334 powi_cost (HOST_WIDE_INT n)
2336 bool cache[POWI_TABLE_SIZE];
2337 unsigned HOST_WIDE_INT digit;
2338 unsigned HOST_WIDE_INT val;
2344 /* Ignore the reciprocal when calculating the cost. */
2345 val = (n < 0) ? -n : n;
2347 /* Initialize the exponent cache. */
2348 memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2353 while (val >= POWI_TABLE_SIZE)
2357 digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2358 result += powi_lookup_cost (digit, cache)
2359 + POWI_WINDOW_SIZE + 1;
2360 val >>= POWI_WINDOW_SIZE;
2369 return result + powi_lookup_cost (val, cache);
2372 /* Recursive subroutine of expand_powi. This function takes the array,
2373 CACHE, of already calculated exponents and an exponent N and returns
2374 an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */
2377 expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2379 unsigned HOST_WIDE_INT digit;
2383 if (n < POWI_TABLE_SIZE)
2388 target = gen_reg_rtx (mode);
2391 op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2392 op1 = expand_powi_1 (mode, powi_table[n], cache);
2396 target = gen_reg_rtx (mode);
2397 digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2398 op0 = expand_powi_1 (mode, n - digit, cache);
2399 op1 = expand_powi_1 (mode, digit, cache);
2403 target = gen_reg_rtx (mode);
2404 op0 = expand_powi_1 (mode, n >> 1, cache);
2408 result = expand_mult (mode, op0, op1, target, 0);
2409 if (result != target)
2410 emit_move_insn (target, result);
2414 /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the
2415 floating point operand in mode MODE, and N is the exponent. This
2416 function needs to be kept in sync with powi_cost above. */
2419 expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2421 unsigned HOST_WIDE_INT val;
2422 rtx cache[POWI_TABLE_SIZE];
2426 return CONST1_RTX (mode);
2428 val = (n < 0) ? -n : n;
2430 memset (cache, 0, sizeof (cache));
2433 result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2435 /* If the original exponent was negative, reciprocate the result. */
2437 result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2438 result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
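/* Illustrative sketch only -- nothing in the compiler uses it.  The names
   example_powi_1 and example_powi are hypothetical; they mirror the
   recursion in expand_powi_1/expand_powi above but compute the value in
   host double precision instead of emitting RTL, which makes the
   table-driven window method easier to follow.  (A separate CACHED flag
   array stands in for the NULL-rtx test used by the RTL version.)  */

static double
example_powi_1 (unsigned HOST_WIDE_INT n, double *cache, bool *cached)
{
  unsigned HOST_WIDE_INT digit;
  double op0, op1;

  if (n < POWI_TABLE_SIZE)
    {
      /* Small exponents come straight from the "optimal power tree";
	 memoize each result so shared sub-exponents cost nothing.  */
      if (cached[n])
	return cache[n];
      op0 = example_powi_1 (n - powi_table[n], cache, cached);
      op1 = example_powi_1 (powi_table[n], cache, cached);
    }
  else if (n & 1)
    {
      /* Odd exponent: peel off the low POWI_WINDOW_SIZE bits (the
	 "window") and recurse on the even remainder.  */
      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
      op0 = example_powi_1 (n - digit, cache, cached);
      op1 = example_powi_1 (digit, cache, cached);
    }
  else
    {
      /* Even exponent: square the half power.  */
      op0 = example_powi_1 (n >> 1, cache, cached);
      op1 = op0;
    }

  op0 = op0 * op1;
  if (n < POWI_TABLE_SIZE)
    {
      cache[n] = op0;
      cached[n] = true;
    }
  return op0;
}

static double
example_powi (double x, HOST_WIDE_INT n)
{
  double cache[POWI_TABLE_SIZE];
  bool cached[POWI_TABLE_SIZE];
  double result;

  if (n == 0)
    return 1.0;

  memset (cached, 0, sizeof (cached));
  cache[1] = x;
  cached[1] = true;

  result = example_powi_1 ((n < 0) ? -n : n, cache, cached);

  /* A negative exponent is handled by one final reciprocal, just as
     expand_powi finishes with a division above.  */
  if (n < 0)
    result = 1.0 / result;
  return result;
}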
2443 /* Expand a call to the pow built-in mathematical function. Return 0 if
2444 a normal call should be emitted rather than expanding the function
2445 in-line. EXP is the expression that is a call to the builtin
2446 function; if convenient, the result should be placed in TARGET. */
2449 expand_builtin_pow (tree exp
, rtx target
, rtx subtarget
)
2451 tree arglist
= TREE_OPERAND (exp
, 1);
2454 if (! validate_arglist (arglist
, REAL_TYPE
, REAL_TYPE
, VOID_TYPE
))
2457 arg0
= TREE_VALUE (arglist
);
2458 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
2460 if (TREE_CODE (arg1
) == REAL_CST
2461 && ! TREE_CONSTANT_OVERFLOW (arg1
))
2463 REAL_VALUE_TYPE cint
;
2467 c
= TREE_REAL_CST (arg1
);
2468 n
= real_to_integer (&c
);
2469 real_from_integer (&cint
, VOIDmode
, n
, n
< 0 ? -1 : 0, 0);
2470 if (real_identical (&c
, &cint
))
2472 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2473 Otherwise, check the number of multiplications required.
2474 Note that pow never sets errno for an integer exponent. */
2475 if ((n
>= -1 && n
<= 2)
2476 || (flag_unsafe_math_optimizations
2478 && powi_cost (n
) <= POWI_MAX_MULTS
))
2480 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
2481 rtx op
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
2482 op
= force_reg (mode
, op
);
2483 return expand_powi (op
, mode
, n
);
2488 if (! flag_unsafe_math_optimizations
)
2490 return expand_builtin_mathfn_2 (exp
, target
, subtarget
);
2493 /* Expand a call to the powi built-in mathematical function. Return 0 if
2494 a normal call should be emitted rather than expanding the function
2495 in-line. EXP is the expression that is a call to the builtin
2496 function; if convenient, the result should be placed in TARGET. */
2499 expand_builtin_powi (tree exp
, rtx target
, rtx subtarget
)
2501 tree arglist
= TREE_OPERAND (exp
, 1);
2504 enum machine_mode mode
;
2505 enum machine_mode mode2
;
2507 if (! validate_arglist (arglist
, REAL_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2510 arg0
= TREE_VALUE (arglist
);
2511 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
2512 mode
= TYPE_MODE (TREE_TYPE (exp
));
2514 /* Handle constant power. */
2516 if (TREE_CODE (arg1
) == INTEGER_CST
2517 && ! TREE_CONSTANT_OVERFLOW (arg1
))
2519 HOST_WIDE_INT n
= TREE_INT_CST_LOW (arg1
);
2521 /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2522 Otherwise, check the number of multiplications required. */
2523 if ((TREE_INT_CST_HIGH (arg1
) == 0
2524 || TREE_INT_CST_HIGH (arg1
) == -1)
2525 && ((n
>= -1 && n
<= 2)
2527 && powi_cost (n
) <= POWI_MAX_MULTS
)))
2529 op0
= expand_expr (arg0
, subtarget
, VOIDmode
, 0);
2530 op0
= force_reg (mode
, op0
);
2531 return expand_powi (op0
, mode
, n
);
2535 /* Emit a libcall to libgcc. */
2537 /* Mode of the 2nd argument must match that of an int. */
2538 mode2
= mode_for_size (INT_TYPE_SIZE
, MODE_INT
, 0);
2540 if (target
== NULL_RTX
)
2541 target
= gen_reg_rtx (mode
);
2543 op0
= expand_expr (arg0
, subtarget
, mode
, 0);
2544 if (GET_MODE (op0
) != mode
)
2545 op0
= convert_to_mode (mode
, op0
, 0);
2546 op1
= expand_expr (arg1
, 0, mode2
, 0);
2547 if (GET_MODE (op1
) != mode2
)
2548 op1
= convert_to_mode (mode2
, op1
, 0);
2550 target
= emit_library_call_value (powi_optab
->handlers
[(int) mode
].libfunc
,
2551 target
, LCT_CONST_MAKE_BLOCK
, mode
, 2,
2552 op0
, mode
, op1
, mode2
);
2557 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
2558 if we failed; the caller should emit a normal call. Otherwise
2559 try to get the result in TARGET, if convenient. */
2562 expand_builtin_strlen (tree arglist
, rtx target
,
2563 enum machine_mode target_mode
)
2565 if (!validate_arglist (arglist
, POINTER_TYPE
, VOID_TYPE
))
2570 tree len
, src
= TREE_VALUE (arglist
);
2571 rtx result
, src_reg
, char_rtx
, before_strlen
;
2572 enum machine_mode insn_mode
= target_mode
, char_mode
;
2573 enum insn_code icode
= CODE_FOR_nothing
;
2576 /* If the length can be computed at compile-time, return it. */
2577 len
= c_strlen (src
, 0);
2579 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2581 /* If the length can be computed at compile-time and is a constant
2582 integer, but there are side-effects in src, evaluate
2583 src for side-effects, then return len.
2584 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2585 can be optimized into: i++; x = 3; */
2586 len
= c_strlen (src
, 1);
2587 if (len
&& TREE_CODE (len
) == INTEGER_CST
)
2589 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2590 return expand_expr (len
, target
, target_mode
, EXPAND_NORMAL
);
2593 align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
2595 /* If SRC is not a pointer type, don't do this operation inline. */
2599 /* Bail out if we can't compute strlen in the right mode. */
2600 while (insn_mode
!= VOIDmode
)
2602 icode
= strlen_optab
->handlers
[(int) insn_mode
].insn_code
;
2603 if (icode
!= CODE_FOR_nothing
)
2606 insn_mode
= GET_MODE_WIDER_MODE (insn_mode
);
2608 if (insn_mode
== VOIDmode
)
2611 /* Make a place to write the result of the instruction. */
2615 && GET_MODE (result
) == insn_mode
2616 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
2617 result
= gen_reg_rtx (insn_mode
);
2619 /* Make a place to hold the source address. We will not expand
2620 the actual source until we are sure that the expansion will
2621 not fail -- there are trees that cannot be expanded twice. */
2622 src_reg
= gen_reg_rtx (Pmode
);
2624 /* Mark the beginning of the strlen sequence so we can emit the
2625 source operand later. */
2626 before_strlen
= get_last_insn ();
2628 char_rtx
= const0_rtx
;
2629 char_mode
= insn_data
[(int) icode
].operand
[2].mode
;
2630 if (! (*insn_data
[(int) icode
].operand
[2].predicate
) (char_rtx
,
2632 char_rtx
= copy_to_mode_reg (char_mode
, char_rtx
);
2634 pat
= GEN_FCN (icode
) (result
, gen_rtx_MEM (BLKmode
, src_reg
),
2635 char_rtx
, GEN_INT (align
));
2640 /* Now that we are assured of success, expand the source. */
2642 pat
= expand_expr (src
, src_reg
, ptr_mode
, EXPAND_NORMAL
);
2644 emit_move_insn (src_reg
, pat
);
2649 emit_insn_after (pat
, before_strlen
);
2651 emit_insn_before (pat
, get_insns ());
2653 /* Return the value in the proper mode for this function. */
2654 if (GET_MODE (result
) == target_mode
)
2656 else if (target
!= 0)
2657 convert_move (target
, result
, 0);
2659 target
= convert_to_mode (target_mode
, result
, 0);
2665 /* Expand a call to the strstr builtin. Return 0 if we failed; the
2666 caller should emit a normal call. Otherwise try to get the result
2667 in TARGET, if convenient (and in mode MODE if that's convenient). */
2670 expand_builtin_strstr (tree arglist
, tree type
, rtx target
, enum machine_mode mode
)
2672 if (validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2674 tree result
= fold_builtin_strstr (arglist
, type
);
2676 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
2681 /* Expand a call to the strchr builtin. Return 0 if we failed; the
2682 caller should emit a normal call. Otherwise try to get the result
2683 in TARGET, if convenient (and in mode MODE if that's convenient). */
2686 expand_builtin_strchr (tree arglist
, tree type
, rtx target
, enum machine_mode mode
)
2688 if (validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2690 tree result
= fold_builtin_strchr (arglist
, type
);
2692 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
2694 /* FIXME: Should use strchrM optab so that ports can optimize this. */
2699 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
2700 caller should emit a normal call. Otherwise try to get the result
2701 in TARGET, if convenient (and in mode MODE if that's convenient). */
2704 expand_builtin_strrchr (tree arglist
, tree type
, rtx target
, enum machine_mode mode
)
2706 if (validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2708 tree result
= fold_builtin_strrchr (arglist
, type
);
2710 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
2715 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
2716 caller should emit a normal call. Otherwise try to get the result
2717 in TARGET, if convenient (and in mode MODE if that's convenient). */
2720 expand_builtin_strpbrk (tree arglist
, tree type
, rtx target
, enum machine_mode mode
)
2722 if (validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2724 tree result
= fold_builtin_strpbrk (arglist
, type
);
2726 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
2731 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2732 bytes from constant string DATA + OFFSET and return it as target constant. */
2736 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
2737 enum machine_mode mode
)
2739 const char *str
= (const char *) data
;
2741 gcc_assert (offset
>= 0
2742 && ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
2743 <= strlen (str
) + 1));
2745 return c_readstr (str
+ offset
, mode
);
2748 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2749 Return 0 if we failed; the caller should emit a normal call.
2750 Otherwise try to get the result in TARGET, if convenient (and in
2751 mode MODE if that's convenient). */
2753 expand_builtin_memcpy (tree exp
, rtx target
, enum machine_mode mode
)
2755 tree fndecl
= get_callee_fndecl (exp
);
2756 tree arglist
= TREE_OPERAND (exp
, 1);
2757 if (!validate_arglist (arglist
,
2758 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2762 tree dest
= TREE_VALUE (arglist
);
2763 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2764 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2765 const char *src_str
;
2766 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2767 unsigned int dest_align
2768 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2769 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
2770 tree result
= fold_builtin_memcpy (fndecl
, arglist
);
2773 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
2775 /* If DEST is not a pointer type, call the normal function. */
2776 if (dest_align
== 0)
2779 /* If either SRC is not a pointer type, don't do this
2780 operation in-line. */
2784 dest_mem
= get_memory_rtx (dest
);
2785 set_mem_align (dest_mem
, dest_align
);
2786 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
2787 src_str
= c_getstr (src
);
2789 /* If SRC is a string constant and block move would be done
2790 by pieces, we can avoid loading the string from memory
2791 and only store the computed constants. */
2793 && GET_CODE (len_rtx
) == CONST_INT
2794 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
2795 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
2796 (void *) src_str
, dest_align
))
2798 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
2799 builtin_memcpy_read_str
,
2800 (void *) src_str
, dest_align
, 0);
2801 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2802 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2806 src_mem
= get_memory_rtx (src
);
2807 set_mem_align (src_mem
, src_align
);
2809 /* Copy word part most expediently. */
2810 dest_addr
= emit_block_move (dest_mem
, src_mem
, len_rtx
,
2815 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2816 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
2822 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2823 Return 0 if we failed; the caller should emit a normal call.
2824 Otherwise try to get the result in TARGET, if convenient (and in
2825 mode MODE if that's convenient). If ENDP is 0 return the
2826 destination pointer, if ENDP is 1 return the end pointer ala
2827 mempcpy, and if ENDP is 2 return the end pointer minus one ala stpcpy. */
2831 expand_builtin_mempcpy (tree arglist
, tree type
, rtx target
, enum machine_mode mode
,
2834 if (!validate_arglist (arglist
,
2835 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2837 /* If return value is ignored, transform mempcpy into memcpy. */
2838 else if (target
== const0_rtx
)
2840 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2845 return expand_expr (build_function_call_expr (fn
, arglist
),
2846 target
, mode
, EXPAND_NORMAL
);
2850 tree dest
= TREE_VALUE (arglist
);
2851 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2852 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2853 const char *src_str
;
2854 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2855 unsigned int dest_align
2856 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2857 rtx dest_mem
, src_mem
, len_rtx
;
2858 tree result
= fold_builtin_mempcpy (arglist
, type
, endp
);
2861 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
2863 /* If either SRC or DEST is not a pointer type, don't do this
2864 operation in-line. */
2865 if (dest_align
== 0 || src_align
== 0)
2868 /* If LEN is not constant, call the normal function. */
2869 if (! host_integerp (len
, 1))
2872 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
2873 src_str
= c_getstr (src
);
2875 /* If SRC is a string constant and block move would be done
2876 by pieces, we can avoid loading the string from memory
2877 and only store the computed constants. */
2879 && GET_CODE (len_rtx
) == CONST_INT
2880 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
2881 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
2882 (void *) src_str
, dest_align
))
2884 dest_mem
= get_memory_rtx (dest
);
2885 set_mem_align (dest_mem
, dest_align
);
2886 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
2887 builtin_memcpy_read_str
,
2888 (void *) src_str
, dest_align
, endp
);
2889 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2890 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2894 if (GET_CODE (len_rtx
) == CONST_INT
2895 && can_move_by_pieces (INTVAL (len_rtx
),
2896 MIN (dest_align
, src_align
)))
2898 dest_mem
= get_memory_rtx (dest
);
2899 set_mem_align (dest_mem
, dest_align
);
2900 src_mem
= get_memory_rtx (src
);
2901 set_mem_align (src_mem
, src_align
);
2902 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
2903 MIN (dest_align
, src_align
), endp
);
2904 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2905 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2913 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2914 if we failed; the caller should emit a normal call. */
2917 expand_builtin_memmove (tree arglist
, tree type
, rtx target
,
2918 enum machine_mode mode
)
2920 if (!validate_arglist (arglist
,
2921 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2925 tree dest
= TREE_VALUE (arglist
);
2926 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2927 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2929 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2930 unsigned int dest_align
2931 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2932 tree result
= fold_builtin_memmove (arglist
, type
);
2935 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
2937 /* If DEST is not a pointer type, call the normal function. */
2938 if (dest_align
== 0)
2941 /* If either SRC is not a pointer type, don't do this
2942 operation in-line. */
2946 /* If src is categorized for a readonly section we can use memcpy. */
2948 if (readonly_data_expr (src
))
2950 tree
const fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2953 return expand_expr (build_function_call_expr (fn
, arglist
),
2954 target
, mode
, EXPAND_NORMAL
);
2957 /* If length is 1 and we can expand the memcpy call inline,
2958 it is ok to use memcpy as well. */
2959 if (integer_onep (len
))
2961 rtx ret
= expand_builtin_mempcpy (arglist
, type
, target
, mode
,
2967 /* Otherwise, call the normal function. */
2972 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2973 if we failed; the caller should emit a normal call. */
2976 expand_builtin_bcopy (tree arglist
, tree type
)
2978 tree src
, dest
, size
, newarglist
;
2980 if (!validate_arglist (arglist
,
2981 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2984 src
= TREE_VALUE (arglist
);
2985 dest
= TREE_VALUE (TREE_CHAIN (arglist
));
2986 size
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2988 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2989 memmove(ptr y, ptr x, size_t z). This is done this way
2990 so that if it isn't expanded inline, we fall back to
2991 calling bcopy instead of memmove. */
2993 newarglist
= build_tree_list (NULL_TREE
, fold_convert (sizetype
, size
));
2994 newarglist
= tree_cons (NULL_TREE
, src
, newarglist
);
2995 newarglist
= tree_cons (NULL_TREE
, dest
, newarglist
);
2997 return expand_builtin_memmove (newarglist
, type
, const0_rtx
, VOIDmode
);
3001 # define HAVE_movstr 0
3002 # define CODE_FOR_movstr CODE_FOR_nothing
3005 /* Expand into a movstr instruction, if one is available. Return 0 if
3006 we failed; the caller should emit a normal call. Otherwise try to
3007 get the result in TARGET, if convenient. If ENDP is 0 return the
3008 destination pointer, if ENDP is 1 return the end pointer ala
3009 mempcpy, and if ENDP is 2 return the end pointer minus one ala stpcpy. */
3013 expand_movstr (tree dest
, tree src
, rtx target
, int endp
)
3019 const struct insn_data
* data
;
3024 dest_mem
= get_memory_rtx (dest
);
3025 src_mem
= get_memory_rtx (src
);
3028 target
= force_reg (Pmode
, XEXP (dest_mem
, 0));
3029 dest_mem
= replace_equiv_address (dest_mem
, target
);
3030 end
= gen_reg_rtx (Pmode
);
3034 if (target
== 0 || target
== const0_rtx
)
3036 end
= gen_reg_rtx (Pmode
);
3044 data
= insn_data
+ CODE_FOR_movstr
;
3046 if (data
->operand
[0].mode
!= VOIDmode
)
3047 end
= gen_lowpart (data
->operand
[0].mode
, end
);
3049 insn
= data
->genfun (end
, dest_mem
, src_mem
);
3055 /* movstr is supposed to set end to the address of the NUL
3056 terminator. If the caller requested a mempcpy-like return value, adjust it. */
3058 if (endp
== 1 && target
!= const0_rtx
)
3060 rtx tem
= plus_constant (gen_lowpart (GET_MODE (target
), end
), 1);
3061 emit_move_insn (target
, force_operand (tem
, NULL_RTX
));
3067 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
3068 if we failed; the caller should emit a normal call. Otherwise try to get
3069 the result in TARGET, if convenient (and in mode MODE if that's convenient). */
3073 expand_builtin_strcpy (tree exp
, rtx target
, enum machine_mode mode
)
3075 tree fndecl
= get_callee_fndecl (exp
);
3076 tree arglist
= TREE_OPERAND (exp
, 1);
3077 if (validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3079 tree result
= fold_builtin_strcpy (fndecl
, arglist
, 0);
3081 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3083 return expand_movstr (TREE_VALUE (arglist
),
3084 TREE_VALUE (TREE_CHAIN (arglist
)),
3085 target
, /*endp=*/0);
3090 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
3091 Return 0 if we failed; the caller should emit a normal call.
3092 Otherwise try to get the result in TARGET, if convenient (and in
3093 mode MODE if that's convenient). */
3096 expand_builtin_stpcpy (tree exp
, rtx target
, enum machine_mode mode
)
3098 tree arglist
= TREE_OPERAND (exp
, 1);
3099 /* If return value is ignored, transform stpcpy into strcpy. */
3100 if (target
== const0_rtx
)
3102 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
3106 return expand_expr (build_function_call_expr (fn
, arglist
),
3107 target
, mode
, EXPAND_NORMAL
);
3110 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3114 tree dst
, src
, len
, lenp1
;
3118 /* Ensure we get an actual string whose length can be evaluated at
3119 compile-time, not an expression containing a string. This is
3120 because the latter will potentially produce pessimized code
3121 when used to produce the return value. */
3122 src
= TREE_VALUE (TREE_CHAIN (arglist
));
3123 if (! c_getstr (src
) || ! (len
= c_strlen (src
, 0)))
3124 return expand_movstr (TREE_VALUE (arglist
),
3125 TREE_VALUE (TREE_CHAIN (arglist
)),
3126 target
, /*endp=*/2);
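/* Worked example (hypothetical call): for stpcpy (d, "hi"), LEN is 2 and
   LENP1 is 3, so the mempcpy expansion below copies three bytes and, with
   ENDP == 2, yields d + 2 -- the address of the terminating NUL, which is
   exactly the value stpcpy must return.  */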
3128 dst
= TREE_VALUE (arglist
);
3129 lenp1
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
3130 narglist
= build_tree_list (NULL_TREE
, lenp1
);
3131 narglist
= tree_cons (NULL_TREE
, src
, narglist
);
3132 narglist
= tree_cons (NULL_TREE
, dst
, narglist
);
3133 ret
= expand_builtin_mempcpy (narglist
, TREE_TYPE (exp
),
3134 target
, mode
, /*endp=*/2);
3139 if (TREE_CODE (len
) == INTEGER_CST
)
3141 rtx len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3143 if (GET_CODE (len_rtx
) == CONST_INT
)
3145 ret
= expand_builtin_strcpy (exp
, target
, mode
);
3151 if (mode
!= VOIDmode
)
3152 target
= gen_reg_rtx (mode
);
3154 target
= gen_reg_rtx (GET_MODE (ret
));
3156 if (GET_MODE (target
) != GET_MODE (ret
))
3157 ret
= gen_lowpart (GET_MODE (target
), ret
);
3159 ret
= plus_constant (ret
, INTVAL (len_rtx
));
3160 ret
= emit_move_insn (target
, force_operand (ret
, NULL_RTX
));
3168 return expand_movstr (TREE_VALUE (arglist
),
3169 TREE_VALUE (TREE_CHAIN (arglist
)),
3170 target
, /*endp=*/2);
3174 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3175 bytes from constant string DATA + OFFSET and return it as target constant. */
3179 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
3180 enum machine_mode mode
)
3182 const char *str
= (const char *) data
;
3184 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
3187 return c_readstr (str
+ offset
, mode
);
3190 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
3191 if we failed; the caller should emit a normal call. */
3194 expand_builtin_strncpy (tree exp
, rtx target
, enum machine_mode mode
)
3196 tree fndecl
= get_callee_fndecl (exp
);
3197 tree arglist
= TREE_OPERAND (exp
, 1);
3198 if (validate_arglist (arglist
,
3199 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3201 tree slen
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)), 1);
3202 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3203 tree result
= fold_builtin_strncpy (fndecl
, arglist
, slen
);
3206 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3208 /* We must be passed a constant len and src parameter. */
3209 if (!host_integerp (len
, 1) || !slen
|| !host_integerp (slen
, 1))
3212 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
3214 /* We're required to pad with trailing zeros if the requested
3215 len is greater than strlen(s2)+1. In that case try to
3216 use store_by_pieces; if it fails, punt. */
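/* For instance, strncpy (d, "ab", 5) must store 'a', 'b' and then three
   NULs; builtin_strncpy_read_str above supplies those zeros once OFFSET
   reaches the end of the source string.  */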
3217 if (tree_int_cst_lt (slen
, len
))
3219 tree dest
= TREE_VALUE (arglist
);
3220 unsigned int dest_align
3221 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3222 const char *p
= c_getstr (TREE_VALUE (TREE_CHAIN (arglist
)));
3225 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
3226 || !can_store_by_pieces (tree_low_cst (len
, 1),
3227 builtin_strncpy_read_str
,
3228 (void *) p
, dest_align
))
3231 dest_mem
= get_memory_rtx (dest
);
3232 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3233 builtin_strncpy_read_str
,
3234 (void *) p
, dest_align
, 0);
3235 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3236 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3243 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3244 bytes from constant string DATA + OFFSET and return it as target constant. */
3248 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3249 enum machine_mode mode
)
3251 const char *c
= (const char *) data
;
3252 char *p
= alloca (GET_MODE_SIZE (mode
));
3254 memset (p
, *c
, GET_MODE_SIZE (mode
));
3256 return c_readstr (p
, mode
);
3259 /* Callback routine for store_by_pieces. Return the RTL of a register
3260 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3261 char value given in the RTL register data. For example, if mode is
3262 4 bytes wide, return the RTL for 0x01010101*data. */
3265 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
3266 enum machine_mode mode
)
3272 size
= GET_MODE_SIZE (mode
);
3277 memset (p
, 1, size
);
3278 coeff
= c_readstr (p
, mode
);
3280 target
= convert_to_mode (mode
, (rtx
) data
, 1);
3281 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
3282 return force_reg (mode
, target
);
3285 /* Expand expression EXP, which is a call to the memset builtin. Return 0
3286 if we failed; the caller should emit a normal call. Otherwise try to get
3287 the result in TARGET, if convenient (and in mode MODE if that's convenient). */
3291 expand_builtin_memset (tree arglist
, rtx target
, enum machine_mode mode
)
3293 if (!validate_arglist (arglist
,
3294 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3298 tree dest
= TREE_VALUE (arglist
);
3299 tree val
= TREE_VALUE (TREE_CHAIN (arglist
));
3300 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3303 unsigned int dest_align
3304 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
3305 rtx dest_mem
, dest_addr
, len_rtx
;
3307 /* If DEST is not a pointer type, don't do this
3308 operation in-line. */
3309 if (dest_align
== 0)
3312 /* If the LEN parameter is zero, return DEST. */
3313 if (integer_zerop (len
))
3315 /* Evaluate and ignore VAL in case it has side-effects. */
3316 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3317 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
3320 if (TREE_CODE (val
) != INTEGER_CST
)
3324 if (!host_integerp (len
, 1))
3327 if (optimize_size
&& tree_low_cst (len
, 1) > 1)
3330 /* Assume that we can memset by pieces if we can store
3331 * the coefficients by pieces (in the required modes).
3332 * We can't pass builtin_memset_gen_str as that emits RTL. */
3334 if (!can_store_by_pieces (tree_low_cst (len
, 1),
3335 builtin_memset_read_str
,
3339 val
= fold (build1 (CONVERT_EXPR
, unsigned_char_type_node
, val
));
3340 val_rtx
= expand_expr (val
, NULL_RTX
, VOIDmode
, 0);
3341 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
3343 dest_mem
= get_memory_rtx (dest
);
3344 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3345 builtin_memset_gen_str
,
3346 val_rtx
, dest_align
, 0);
3347 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3348 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3352 if (target_char_cast (val
, &c
))
3357 if (!host_integerp (len
, 1))
3359 if (!can_store_by_pieces (tree_low_cst (len
, 1),
3360 builtin_memset_read_str
, &c
,
3364 dest_mem
= get_memory_rtx (dest
);
3365 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
3366 builtin_memset_read_str
,
3368 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3369 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
3373 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3375 dest_mem
= get_memory_rtx (dest
);
3376 set_mem_align (dest_mem
, dest_align
);
3377 dest_addr
= clear_storage (dest_mem
, len_rtx
);
3381 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
3382 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
3389 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
3390 if we failed; the caller should emit a normal call. */
3393 expand_builtin_bzero (tree arglist
)
3395 tree dest
, size
, newarglist
;
3397 if (!validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3400 dest
= TREE_VALUE (arglist
);
3401 size
= TREE_VALUE (TREE_CHAIN (arglist
));
3403 /* New argument list transforming bzero(ptr x, int y) to
3404 memset(ptr x, int 0, size_t y). This is done this way
3405 so that if it isn't expanded inline, we fall back to
3406 calling bzero instead of memset. */
3408 newarglist
= build_tree_list (NULL_TREE
, fold_convert (sizetype
, size
));
3409 newarglist
= tree_cons (NULL_TREE
, integer_zero_node
, newarglist
);
3410 newarglist
= tree_cons (NULL_TREE
, dest
, newarglist
);
3412 return expand_builtin_memset (newarglist
, const0_rtx
, VOIDmode
);
3415 /* Expand expression EXP, which is a call to the memcmp built-in function.
3416 ARGLIST is the argument list for this call. Return 0 if we failed and the
3417 caller should emit a normal call, otherwise try to get the result in
3418 TARGET, if convenient (and in mode MODE, if that's convenient). */
3421 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED
, tree arglist
, rtx target
,
3422 enum machine_mode mode
)
3424 if (!validate_arglist (arglist
,
3425 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3429 tree result
= fold_builtin_memcmp (arglist
);
3431 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3434 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrsi
3436 tree arg1
= TREE_VALUE (arglist
);
3437 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
3438 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3439 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3444 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3446 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3447 enum machine_mode insn_mode
;
3449 #ifdef HAVE_cmpmemsi
3451 insn_mode
= insn_data
[(int) CODE_FOR_cmpmemsi
].operand
[0].mode
;
3454 #ifdef HAVE_cmpstrsi
3456 insn_mode
= insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3461 /* If we don't have POINTER_TYPE, call the function. */
3462 if (arg1_align
== 0 || arg2_align
== 0)
3465 /* Make a place to write the result of the instruction. */
3468 && REG_P (result
) && GET_MODE (result
) == insn_mode
3469 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3470 result
= gen_reg_rtx (insn_mode
);
3472 arg1_rtx
= get_memory_rtx (arg1
);
3473 arg2_rtx
= get_memory_rtx (arg2
);
3474 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3476 /* Set MEM_SIZE as appropriate. */
3477 if (GET_CODE (arg3_rtx
) == CONST_INT
)
3479 set_mem_size (arg1_rtx
, arg3_rtx
);
3480 set_mem_size (arg2_rtx
, arg3_rtx
);
3483 #ifdef HAVE_cmpmemsi
3485 insn
= gen_cmpmemsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3486 GEN_INT (MIN (arg1_align
, arg2_align
)));
3489 #ifdef HAVE_cmpstrsi
3491 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3492 GEN_INT (MIN (arg1_align
, arg2_align
)));
3500 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE_MAKE_BLOCK
,
3501 TYPE_MODE (integer_type_node
), 3,
3502 XEXP (arg1_rtx
, 0), Pmode
,
3503 XEXP (arg2_rtx
, 0), Pmode
,
3504 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
3505 TYPE_UNSIGNED (sizetype
)),
3506 TYPE_MODE (sizetype
));
3508 /* Return the value in the proper mode for this function. */
3509 mode
= TYPE_MODE (TREE_TYPE (exp
));
3510 if (GET_MODE (result
) == mode
)
3512 else if (target
!= 0)
3514 convert_move (target
, result
, 0);
3518 return convert_to_mode (mode
, result
, 0);
3525 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
3526 if we failed; the caller should emit a normal call. Otherwise try to get
3527 the result in TARGET, if convenient. */
3530 expand_builtin_strcmp (tree exp
, rtx target
, enum machine_mode mode
)
3532 tree arglist
= TREE_OPERAND (exp
, 1);
3534 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3538 tree result
= fold_builtin_strcmp (arglist
);
3540 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3543 #ifdef HAVE_cmpstrsi
3546 tree arg1
= TREE_VALUE (arglist
);
3547 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
3548 tree len
, len1
, len2
;
3549 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3554 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3556 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3557 enum machine_mode insn_mode
3558 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3560 len1
= c_strlen (arg1
, 1);
3561 len2
= c_strlen (arg2
, 1);
3564 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3566 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3568 /* If we don't have a constant length for the first, use the length
3569 of the second, if we know it. We don't require a constant for
3570 this case; some cost analysis could be done if both are available
3571 but neither is constant. For now, assume they're equally cheap,
3572 unless one has side effects. If both strings have constant lengths, use the smaller. */
3579 else if (TREE_SIDE_EFFECTS (len1
))
3581 else if (TREE_SIDE_EFFECTS (len2
))
3583 else if (TREE_CODE (len1
) != INTEGER_CST
)
3585 else if (TREE_CODE (len2
) != INTEGER_CST
)
3587 else if (tree_int_cst_lt (len1
, len2
))
3592 /* If both arguments have side effects, we cannot optimize. */
3593 if (!len
|| TREE_SIDE_EFFECTS (len
))
3596 /* If we don't have POINTER_TYPE, call the function. */
3597 if (arg1_align
== 0 || arg2_align
== 0)
3600 /* Make a place to write the result of the instruction. */
3603 && REG_P (result
) && GET_MODE (result
) == insn_mode
3604 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3605 result
= gen_reg_rtx (insn_mode
);
3607 /* Stabilize the arguments in case gen_cmpstrsi fails. */
3608 arg1
= builtin_save_expr (arg1
);
3609 arg2
= builtin_save_expr (arg2
);
3611 arg1_rtx
= get_memory_rtx (arg1
);
3612 arg2_rtx
= get_memory_rtx (arg2
);
3613 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3614 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3615 GEN_INT (MIN (arg1_align
, arg2_align
)));
3620 /* Return the value in the proper mode for this function. */
3621 mode
= TYPE_MODE (TREE_TYPE (exp
));
3622 if (GET_MODE (result
) == mode
)
3625 return convert_to_mode (mode
, result
, 0);
3626 convert_move (target
, result
, 0);
3630 /* Expand the library call ourselves using a stabilized argument
3631 list to avoid re-evaluating the function's arguments twice. */
3632 arglist
= build_tree_list (NULL_TREE
, arg2
);
3633 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
3634 fndecl
= get_callee_fndecl (exp
);
3635 exp
= build_function_call_expr (fndecl
, arglist
);
3636 return expand_call (exp
, target
, target
== const0_rtx
);
3642 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3643 if we failed; the caller should emit a normal call. Otherwise try to get
3644 the result in TARGET, if convenient. */
3647 expand_builtin_strncmp (tree exp
, rtx target
, enum machine_mode mode
)
3649 tree arglist
= TREE_OPERAND (exp
, 1);
3651 if (!validate_arglist (arglist
,
3652 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3656 tree result
= fold_builtin_strncmp (arglist
);
3658 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3661 /* If c_strlen can determine an expression for one of the string
3662 lengths, and it doesn't have side effects, then emit cmpstrsi
3663 using length MIN(strlen(string)+1, arg3). */
3664 #ifdef HAVE_cmpstrsi
3667 tree arg1
= TREE_VALUE (arglist
);
3668 tree arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
3669 tree arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3670 tree len
, len1
, len2
;
3671 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3676 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3678 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3679 enum machine_mode insn_mode
3680 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3682 len1
= c_strlen (arg1
, 1);
3683 len2
= c_strlen (arg2
, 1);
3686 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3688 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3690 /* If we don't have a constant length for the first, use the length
3691 of the second, if we know it. We don't require a constant for
3692 this case; some cost analysis could be done if both are available
3693 but neither is constant. For now, assume they're equally cheap,
3694 unless one has side effects. If both strings have constant lengths, use the smaller. */
3701 else if (TREE_SIDE_EFFECTS (len1
))
3703 else if (TREE_SIDE_EFFECTS (len2
))
3705 else if (TREE_CODE (len1
) != INTEGER_CST
)
3707 else if (TREE_CODE (len2
) != INTEGER_CST
)
3709 else if (tree_int_cst_lt (len1
, len2
))
3714 /* If both arguments have side effects, we cannot optimize. */
3715 if (!len
|| TREE_SIDE_EFFECTS (len
))
3718 /* The actual new length parameter is MIN(len,arg3). */
3719 len
= fold (build2 (MIN_EXPR
, TREE_TYPE (len
), len
,
3720 fold_convert (TREE_TYPE (len
), arg3
)));
3722 /* If we don't have POINTER_TYPE, call the function. */
3723 if (arg1_align
== 0 || arg2_align
== 0)
3726 /* Make a place to write the result of the instruction. */
3729 && REG_P (result
) && GET_MODE (result
) == insn_mode
3730 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3731 result
= gen_reg_rtx (insn_mode
);
3733 /* Stabilize the arguments in case gen_cmpstrsi fails. */
3734 arg1
= builtin_save_expr (arg1
);
3735 arg2
= builtin_save_expr (arg2
);
3736 len
= builtin_save_expr (len
);
3738 arg1_rtx
= get_memory_rtx (arg1
);
3739 arg2_rtx
= get_memory_rtx (arg2
);
3740 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3741 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3742 GEN_INT (MIN (arg1_align
, arg2_align
)));
3747 /* Return the value in the proper mode for this function. */
3748 mode
= TYPE_MODE (TREE_TYPE (exp
));
3749 if (GET_MODE (result
) == mode
)
3752 return convert_to_mode (mode
, result
, 0);
3753 convert_move (target
, result
, 0);
3757 /* Expand the library call ourselves using a stabilized argument
3758 list to avoid re-evaluating the function's arguments twice. */
3759 arglist
= build_tree_list (NULL_TREE
, len
);
3760 arglist
= tree_cons (NULL_TREE
, arg2
, arglist
);
3761 arglist
= tree_cons (NULL_TREE
, arg1
, arglist
);
3762 fndecl
= get_callee_fndecl (exp
);
3763 exp
= build_function_call_expr (fndecl
, arglist
);
3764 return expand_call (exp
, target
, target
== const0_rtx
);
3770 /* Expand expression EXP, which is a call to the strcat builtin.
3771 Return 0 if we failed; the caller should emit a normal call.
3772 Otherwise try to get the result in TARGET, if convenient. */
3775 expand_builtin_strcat (tree arglist
, tree type
, rtx target
, enum machine_mode mode
)
3777 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3781 tree dst
= TREE_VALUE (arglist
),
3782 src
= TREE_VALUE (TREE_CHAIN (arglist
));
3783 const char *p
= c_getstr (src
);
3787 /* If the string length is zero, return the dst parameter. */
3789 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
3790 else if (!optimize_size
)
3792 /* Otherwise if !optimize_size, see if we can store by
3793 pieces into (dst + strlen(dst)). */
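/* In effect (hypothetical source-level view of the transformation):
   strcat (d, "abc") becomes mempcpy (d + strlen (d), "abc", 4), with the
   original d kept as the value of the call, since strcat returns its
   first argument rather than the end pointer.  */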
3794 tree newdst
, arglist
,
3795 strlen_fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
3797 /* This is the length argument. */
3798 arglist
= build_tree_list (NULL_TREE
,
3799 fold (size_binop (PLUS_EXPR
,
3802 /* Prepend src argument. */
3803 arglist
= tree_cons (NULL_TREE
, src
, arglist
);
3805 /* We're going to use dst more than once. */
3806 dst
= builtin_save_expr (dst
);
3808 /* Create strlen (dst). */
3810 fold (build_function_call_expr (strlen_fn
,
3811 build_tree_list (NULL_TREE
,
3813 /* Create (dst + (cast) strlen (dst)). */
3814 newdst
= fold_convert (TREE_TYPE (dst
), newdst
);
3815 newdst
= fold (build2 (PLUS_EXPR
, TREE_TYPE (dst
), dst
, newdst
));
3817 /* Prepend the new dst argument. */
3818 arglist
= tree_cons (NULL_TREE
, newdst
, arglist
);
3820 /* We don't want to get turned into a memcpy if the
3821 target is const0_rtx, i.e. when the return value
3822 isn't used. That would produce pessimized code, so
3823 pass in a target of zero; it should never actually be
3824 used. If this was successful, return the original
3825 dst, not the result of mempcpy. */
3826 if (expand_builtin_mempcpy (arglist
, type
, /*target=*/0, mode
, /*endp=*/0))
3827 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
3837 /* Expand expression EXP, which is a call to the strncat builtin.
3838 Return 0 if we failed; the caller should emit a normal call.
3839 Otherwise try to get the result in TARGET, if convenient. */
3842 expand_builtin_strncat (tree arglist
, rtx target
, enum machine_mode mode
)
3844 if (validate_arglist (arglist
,
3845 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3847 tree result
= fold_builtin_strncat (arglist
);
3849 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3854 /* Expand expression EXP, which is a call to the strspn builtin.
3855 Return 0 if we failed the caller should emit a normal call,
3856 otherwise try to get the result in TARGET, if convenient. */
3859 expand_builtin_strspn (tree arglist
, rtx target
, enum machine_mode mode
)
3861 if (validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3863 tree result
= fold_builtin_strspn (arglist
);
3865 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3870 /* Expand expression EXP, which is a call to the strcspn builtin.
3871 Return 0 if we failed the caller should emit a normal call,
3872 otherwise try to get the result in TARGET, if convenient. */
3875 expand_builtin_strcspn (tree arglist
, rtx target
, enum machine_mode mode
)
3877 if (validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3879 tree result
= fold_builtin_strcspn (arglist
);
3881 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3886 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3887 if that's convenient. */
3890 expand_builtin_saveregs (void)
3894 /* Don't do __builtin_saveregs more than once in a function.
3895 Save the result of the first call and reuse it. */
3896 if (saveregs_value
!= 0)
3897 return saveregs_value
;
3899 /* When this function is called, it means that registers must be
3900 saved on entry to this function. So we migrate the call to the
3901 first insn of this function. */
3905 /* Do whatever the machine needs done in this case. */
3906 val
= targetm
.calls
.expand_builtin_saveregs ();
3911 saveregs_value
= val
;
3913 /* Put the insns after the NOTE that starts the function. If this
3914 is inside a start_sequence, make the outer-level insn chain current, so
3915 the code is placed at the start of the function. */
3916 push_topmost_sequence ();
3917 emit_insn_after (seq
, entry_of_function ());
3918 pop_topmost_sequence ();
3923 /* __builtin_args_info (N) returns word N of the arg space info
3924 for the current function. The number and meanings of words
3925 is controlled by the definition of CUMULATIVE_ARGS. */
3928 expand_builtin_args_info (tree arglist
)
3930 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
3931 int *word_ptr
= (int *) ¤t_function_args_info
;
3933 gcc_assert (sizeof (CUMULATIVE_ARGS
) % sizeof (int) == 0);
3937 if (!host_integerp (TREE_VALUE (arglist
), 0))
3938 error ("argument of %<__builtin_args_info%> must be constant");
3941 HOST_WIDE_INT wordnum
= tree_low_cst (TREE_VALUE (arglist
), 0);
3943 if (wordnum
< 0 || wordnum
>= nwords
)
3944 error ("argument of %<__builtin_args_info%> out of range");
3946 return GEN_INT (word_ptr
[wordnum
]);
3950 error ("missing argument in %<__builtin_args_info%>");
3955 /* Expand a call to __builtin_next_arg. */
3958 expand_builtin_next_arg (void)
3960 /* Checking arguments is already done in fold_builtin_next_arg
3961 that must be called before this function. */
3962 return expand_binop (Pmode
, add_optab
,
3963 current_function_internal_arg_pointer
,
3964 current_function_arg_offset_rtx
,
3965 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3968 /* Make it easier for the backends by protecting the valist argument
3969 from multiple evaluations. */
3972 stabilize_va_list (tree valist
, int needs_lvalue
)
3974 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
3976 if (TREE_SIDE_EFFECTS (valist
))
3977 valist
= save_expr (valist
);
3979 /* For this case, the backends will be expecting a pointer to
3980 TREE_TYPE (va_list_type_node), but it's possible we've
3981 actually been given an array (an actual va_list_type_node).
3983 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
3985 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
3986 valist
= build_fold_addr_expr_with_type (valist
, p1
);
3995 if (! TREE_SIDE_EFFECTS (valist
))
3998 pt
= build_pointer_type (va_list_type_node
);
3999 valist
= fold (build1 (ADDR_EXPR
, pt
, valist
));
4000 TREE_SIDE_EFFECTS (valist
) = 1;
4003 if (TREE_SIDE_EFFECTS (valist
))
4004 valist
= save_expr (valist
);
4005 valist
= build_fold_indirect_ref (valist
);
4011 /* The "standard" definition of va_list is void*. */
4014 std_build_builtin_va_list (void)
4016 return ptr_type_node
;
4019 /* The "standard" implementation of va_start: just assign `nextarg' to
4023 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
4027 t
= build2 (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
4028 make_tree (ptr_type_node
, nextarg
));
4029 TREE_SIDE_EFFECTS (t
) = 1;
4031 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4034 /* Expand ARGLIST, from a call to __builtin_va_start. */
4037 expand_builtin_va_start (tree arglist
)
4042 chain
= TREE_CHAIN (arglist
);
4046 error ("too few arguments to function %<va_start%>");
4050 if (fold_builtin_next_arg (chain
))
4053 nextarg
= expand_builtin_next_arg ();
4054 valist
= stabilize_va_list (TREE_VALUE (arglist
), 1);
4056 #ifdef EXPAND_BUILTIN_VA_START
4057 EXPAND_BUILTIN_VA_START (valist
, nextarg
);
4059 std_expand_builtin_va_start (valist
, nextarg
);
4065 /* The "standard" implementation of va_arg: read the value from the
4066    current (padded) address and increment by the (padded) size.  */

4069 std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)

4071   tree addr, t, type_size, rounded_size, valist_tmp;
4072   unsigned HOST_WIDE_INT align, boundary;

4075 #ifdef ARGS_GROW_DOWNWARD
4076   /* All of the alignment and movement below is for args-grow-up machines.
4077      As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4078      implement their own specialized gimplify_va_arg_expr routines.  */

4082   indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4084     type = build_pointer_type (type);

4086   align = PARM_BOUNDARY / BITS_PER_UNIT;
4087   boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;

4089   /* Hoist the valist value into a temporary for the moment.  */
4090   valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);

4092   /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
4093      requires greater alignment, we must perform dynamic alignment.  */
4094   if (boundary > align)

4096       t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
4097       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4098                   build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
4099       gimplify_and_add (t, pre_p);

4101       t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
4102       t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4103                   build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
4104       gimplify_and_add (t, pre_p);

4109   /* If the actual alignment is less than the alignment of the type,
4110      adjust the type accordingly so that we don't assume strict alignment
4111      when dereferencing the pointer.  */
4112   boundary *= BITS_PER_UNIT;
4113   if (boundary < TYPE_ALIGN (type))

4115       type = build_variant_type_copy (type);
4116       TYPE_ALIGN (type) = boundary;

4119   /* Compute the rounded size of the type.  */
4120   type_size = size_in_bytes (type);
4121   rounded_size = round_up (type_size, align);

4123   /* Reduce rounded_size so it's sharable with the postqueue.  */
4124   gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);

4128   if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))

4130       /* Small args are padded downward.  */
4131       t = fold (build2 (GT_EXPR, sizetype, rounded_size, size_int (align)));
4132       t = fold (build3 (COND_EXPR, sizetype, t, size_zero_node,
4133                         size_binop (MINUS_EXPR, rounded_size, type_size)));
4134       t = fold_convert (TREE_TYPE (addr), t);
4135       addr = fold (build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t));

4138   /* Compute new value for AP.  */
4139   t = fold_convert (TREE_TYPE (valist), rounded_size);
4140   t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
4141   t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4142   gimplify_and_add (t, pre_p);

4144   addr = fold_convert (build_pointer_type (type), addr);
4147     addr = build_va_arg_indirect_ref (addr);
4149   return build_va_arg_indirect_ref (addr);
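
/* Illustrative sketch, not part of GCC: the pointer arithmetic generated
   above, written as ordinary C.  Assumes a plain void*-style va_list, a
   power-of-two ALIGN, and ignores PAD_VARARGS_DOWN and the dynamic
   realignment step.  The helper name is hypothetical.  */

#include <stddef.h>

/* Fetch from the current (padded) address, then advance the ap pointer
   by the padded size of the requested type.  */
static void *
std_va_arg_model (char **ap, size_t type_size, size_t align)
{
  size_t rounded = (type_size + align - 1) & ~(align - 1);
  void *addr = *ap;     /* read the value from the current address */
  *ap += rounded;       /* increment by the (padded) size */
  return addr;
}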
4152 /* Build an indirect-ref expression over the given TREE, which represents a
4153 piece of a va_arg() expansion. */
4155 build_va_arg_indirect_ref (tree addr
)
4157 addr
= build_fold_indirect_ref (addr
);
4159 if (flag_mudflap
) /* Don't instrument va_arg INDIRECT_REF. */
4165 /* Return a dummy expression of type TYPE in order to keep going after an
4169 dummy_object (tree type
)
4171 tree t
= convert (build_pointer_type (type
), null_pointer_node
);
4172 return build1 (INDIRECT_REF
, type
, t
);
4175 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4176 builtin function, but a very special sort of operator. */
4178 enum gimplify_status
4179 gimplify_va_arg_expr (tree
*expr_p
, tree
*pre_p
, tree
*post_p
)
4181 tree promoted_type
, want_va_type
, have_va_type
;
4182 tree valist
= TREE_OPERAND (*expr_p
, 0);
4183 tree type
= TREE_TYPE (*expr_p
);
4186 /* Verify that valist is of the proper type. */
4187 want_va_type
= va_list_type_node
;
4188 have_va_type
= TREE_TYPE (valist
);
4190 if (have_va_type
== error_mark_node
)
4193 if (TREE_CODE (want_va_type
) == ARRAY_TYPE
)
4195 /* If va_list is an array type, the argument may have decayed
4196 to a pointer type, e.g. by being passed to another function.
4197 In that case, unwrap both types so that we can compare the
4198 underlying records. */
4199 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
4200 || POINTER_TYPE_P (have_va_type
))
4202 want_va_type
= TREE_TYPE (want_va_type
);
4203 have_va_type
= TREE_TYPE (have_va_type
);
4207 if (TYPE_MAIN_VARIANT (want_va_type
) != TYPE_MAIN_VARIANT (have_va_type
))
4209 error ("first argument to %<va_arg%> not of type %<va_list%>");
4213 /* Generate a diagnostic for requesting data of a type that cannot
4214 be passed through `...' due to type promotion at the call site. */
4215 else if ((promoted_type
= lang_hooks
.types
.type_promotes_to (type
))
4218 static bool gave_help
;
4220 /* Unfortunately, this is merely undefined, rather than a constraint
4221 violation, so we cannot make this an error. If this call is never
4222 executed, the program is still strictly conforming. */
4223 warning ("%qT is promoted to %qT when passed through %<...%>",
4224 type
, promoted_type
);
4228 warning ("(so you should pass %qT not %qT to %<va_arg%>)",
4229 promoted_type
, type
);
4232 /* We can, however, treat "undefined" any way we please.
4233 Call abort to encourage the user to fix the program. */
4234 inform ("if this code is reached, the program will abort");
4235 t
= build_function_call_expr (implicit_built_in_decls
[BUILT_IN_TRAP
],
4237 append_to_statement_list (t
, pre_p
);
4239 /* This is dead code, but go ahead and finish so that the
4240 mode of the result comes out right. */
4241 *expr_p
= dummy_object (type
);
4246 /* Make it easier for the backends by protecting the valist argument
4247 from multiple evaluations. */
4248 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
4250 /* For this case, the backends will be expecting a pointer to
4251 TREE_TYPE (va_list_type_node), but it's possible we've
4252 actually been given an array (an actual va_list_type_node).
4254 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
4256 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
4257 valist
= build_fold_addr_expr_with_type (valist
, p1
);
4259 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_val
, fb_rvalue
);
4262 gimplify_expr (&valist
, pre_p
, post_p
, is_gimple_min_lval
, fb_lvalue
);
4264 if (!targetm
.gimplify_va_arg_expr
)
4265 /* Once most targets are converted this should abort. */
4268 *expr_p
= targetm
.gimplify_va_arg_expr (valist
, type
, pre_p
, post_p
);
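
/* Illustrative example, not part of GCC, of the promotion diagnostic
   above: a float argument is promoted to double when passed through
   `...', so va_arg must name the promoted type.  va_arg (ap, float)
   would be the undefined case that the warning and trap call cover.
   The function name is hypothetical.  */

#include <stdarg.h>

static double
first_vararg_as_double (int count, ...)
{
  va_list ap;
  double d = 0.0;
  va_start (ap, count);
  if (count > 0)
    d = va_arg (ap, double);   /* float arguments arrive as double */
  va_end (ap);
  return d;
}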
4273 /* Expand ARGLIST, from a call to __builtin_va_end. */
4276 expand_builtin_va_end (tree arglist
)
4278 tree valist
= TREE_VALUE (arglist
);
4280 /* Evaluate for side effects, if needed. I hate macros that don't
4282 if (TREE_SIDE_EFFECTS (valist
))
4283 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4288 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
4289 builtin rather than just as an assignment in stdarg.h because of the
4290 nastiness of array-type va_list types. */
4293 expand_builtin_va_copy (tree arglist
)
4297 dst
= TREE_VALUE (arglist
);
4298 src
= TREE_VALUE (TREE_CHAIN (arglist
));
4300 dst
= stabilize_va_list (dst
, 1);
4301 src
= stabilize_va_list (src
, 0);
4303 if (TREE_CODE (va_list_type_node
) != ARRAY_TYPE
)
4305 t
= build2 (MODIFY_EXPR
, va_list_type_node
, dst
, src
);
4306 TREE_SIDE_EFFECTS (t
) = 1;
4307 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4311 rtx dstb
, srcb
, size
;
4313 /* Evaluate to pointers. */
4314 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4315 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
4316 size
= expand_expr (TYPE_SIZE_UNIT (va_list_type_node
), NULL_RTX
,
4317 VOIDmode
, EXPAND_NORMAL
);
4319 dstb
= convert_memory_address (Pmode
, dstb
);
4320 srcb
= convert_memory_address (Pmode
, srcb
);
4322 /* "Dereference" to BLKmode memories. */
4323 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
4324 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
4325 set_mem_align (dstb
, TYPE_ALIGN (va_list_type_node
));
4326 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
4327 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
4328 set_mem_align (srcb
, TYPE_ALIGN (va_list_type_node
));
4331 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
4337 /* Expand a call to one of the builtin functions __builtin_frame_address or
4338 __builtin_return_address. */
4341 expand_builtin_frame_address (tree fndecl
, tree arglist
)
4343 /* The argument must be a nonnegative integer constant.
4344 It counts the number of frames to scan up the stack.
4345 The value is the return address saved in that frame. */
4347 /* Warning about missing arg was already issued. */
4349 else if (! host_integerp (TREE_VALUE (arglist
), 1))
4351 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4352 error ("invalid argument to %<__builtin_frame_address%>");
4354 error ("invalid argument to %<__builtin_return_address%>");
4360 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
4361 tree_low_cst (TREE_VALUE (arglist
), 1));
4363 /* Some ports cannot access arbitrary stack frames. */
4366 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4367 warning ("unsupported argument to %<__builtin_frame_address%>");
4369 warning ("unsupported argument to %<__builtin_return_address%>");
4373 /* For __builtin_frame_address, return what we've got. */
4374 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
4378 && ! CONSTANT_P (tem
))
4379 tem
= copy_to_mode_reg (Pmode
, tem
);
4384 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
4385 we failed and the caller should emit a normal call, otherwise try to get
4386 the result in TARGET, if convenient. */
4389 expand_builtin_alloca (tree arglist
, rtx target
)
4394 /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
4395 should always expand to function calls. These can be intercepted
4400 if (!validate_arglist (arglist
, INTEGER_TYPE
, VOID_TYPE
))
4403 /* Compute the argument. */
4404 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
4406 /* Allocate the desired space. */
4407 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
4408 result
= convert_memory_address (ptr_mode
, result
);
4413 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
4414 Return 0 if a normal call should be emitted rather than expanding the
4415 function in-line. If convenient, the result should be placed in TARGET.
4416 SUBTARGET may be used as the target for computing one of EXP's operands. */
4419 expand_builtin_unop (enum machine_mode target_mode
, tree arglist
, rtx target
,
4420 rtx subtarget
, optab op_optab
)
4423 if (!validate_arglist (arglist
, INTEGER_TYPE
, VOID_TYPE
))
4426 /* Compute the argument. */
4427 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
4428 /* Compute op, into TARGET if possible.
4429 Set TARGET to wherever the result comes back. */
4430 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
4431 op_optab
, op0
, target
, 1);
4432 gcc_assert (target
);
4434 return convert_to_mode (target_mode
, target
, 0);
4437 /* If the string passed to fputs is a constant and is one character
4438 long, we attempt to transform this call into __builtin_fputc(). */
4441 expand_builtin_fputs (tree arglist
, rtx target
, bool unlocked
)
4443 /* Verify the arguments in the original call. */
4444 if (validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
4446 tree result
= fold_builtin_fputs (arglist
, (target
== const0_rtx
),
4447 unlocked
, NULL_TREE
);
4449 return expand_expr (result
, target
, VOIDmode
, EXPAND_NORMAL
);
4454 /* Expand a call to __builtin_expect.  We return our argument and emit a
4455    NOTE_INSN_EXPECTED_VALUE note.  This is the expansion of __builtin_expect in
4456    a non-jump context.  */

4459 expand_builtin_expect (tree arglist, rtx target)

4464   if (arglist == NULL_TREE
4465       || TREE_CHAIN (arglist) == NULL_TREE)

4467   exp = TREE_VALUE (arglist);
4468   c = TREE_VALUE (TREE_CHAIN (arglist));

4470   if (TREE_CODE (c) != INTEGER_CST)

4472       error ("second argument to %<__builtin_expect%> must be a constant");
4473       c = integer_zero_node;

4476   target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);

4478   /* Don't bother with expected value notes for integral constants.  */
4479   if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)

4481       /* We do need to force this into a register so that we can be
4482          moderately sure to be able to correctly interpret the branch ...  */
4484       target = force_reg (GET_MODE (target), target);

4486       rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);

4488       note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4489       NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
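
/* Illustrative usage, not part of GCC: a call to __builtin_expect in a
   non-jump context.  The value of the first argument is returned
   unchanged; the constant only feeds branch prediction.  The function
   name below is hypothetical.  */

static int
check_size (unsigned long n, unsigned long limit)
{
  if (__builtin_expect (n > limit, 0))
    return -1;          /* the test is expected to be false */
  return 0;             /* likely path */
}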
4495 /* Like expand_builtin_expect, except do this in a jump context. This is
4496 called from do_jump if the conditional is a __builtin_expect. Return either
4497 a list of insns to emit the jump or NULL if we cannot optimize
4498 __builtin_expect. We need to optimize this at jump time so that machines
4499 like the PowerPC don't turn the test into a SCC operation, and then jump
4500 based on the test being 0/1. */
4503 expand_builtin_expect_jump (tree exp
, rtx if_false_label
, rtx if_true_label
)
4505 tree arglist
= TREE_OPERAND (exp
, 1);
4506 tree arg0
= TREE_VALUE (arglist
);
4507 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4510 /* Only handle __builtin_expect (test, 0) and
4511 __builtin_expect (test, 1). */
4512 if (TREE_CODE (TREE_TYPE (arg1
)) == INTEGER_TYPE
4513 && (integer_zerop (arg1
) || integer_onep (arg1
)))
4515 rtx insn
, drop_through_label
, temp
;
4517 /* Expand the jump insns. */
4519 do_jump (arg0
, if_false_label
, if_true_label
);
4522 drop_through_label
= get_last_insn ();
4523 if (drop_through_label
&& NOTE_P (drop_through_label
))
4524 drop_through_label
= prev_nonnote_insn (drop_through_label
);
4525 if (drop_through_label
&& !LABEL_P (drop_through_label
))
4526 drop_through_label
= NULL_RTX
;
4529 if (! if_true_label
)
4530 if_true_label
= drop_through_label
;
4531 if (! if_false_label
)
4532 if_false_label
= drop_through_label
;
4534 /* Go through and add the expect's to each of the conditional jumps. */
4536 while (insn
!= NULL_RTX
)
4538 rtx next
= NEXT_INSN (insn
);
4540 if (JUMP_P (insn
) && any_condjump_p (insn
))
4542 rtx ifelse
= SET_SRC (pc_set (insn
));
4543 rtx then_dest
= XEXP (ifelse
, 1);
4544 rtx else_dest
= XEXP (ifelse
, 2);
4547 /* First check if we recognize any of the labels. */
4548 if (GET_CODE (then_dest
) == LABEL_REF
4549 && XEXP (then_dest
, 0) == if_true_label
)
4551 else if (GET_CODE (then_dest
) == LABEL_REF
4552 && XEXP (then_dest
, 0) == if_false_label
)
4554 else if (GET_CODE (else_dest
) == LABEL_REF
4555 && XEXP (else_dest
, 0) == if_false_label
)
4557 else if (GET_CODE (else_dest
) == LABEL_REF
4558 && XEXP (else_dest
, 0) == if_true_label
)
4560 /* Otherwise check where we drop through. */
4561 else if (else_dest
== pc_rtx
)
4563 if (next
&& NOTE_P (next
))
4564 next
= next_nonnote_insn (next
);
4566 if (next
&& JUMP_P (next
)
4567 && any_uncondjump_p (next
))
4568 temp
= XEXP (SET_SRC (pc_set (next
)), 0);
4572 /* TEMP is either a CODE_LABEL, NULL_RTX or something
4573 else that can't possibly match either target label. */
4574 if (temp
== if_false_label
)
4576 else if (temp
== if_true_label
)
4579 else if (then_dest
== pc_rtx
)
4581 if (next
&& NOTE_P (next
))
4582 next
= next_nonnote_insn (next
);
4584 if (next
&& JUMP_P (next
)
4585 && any_uncondjump_p (next
))
4586 temp
= XEXP (SET_SRC (pc_set (next
)), 0);
4590 if (temp
== if_false_label
)
4592 else if (temp
== if_true_label
)
4598 /* If the test is expected to fail, reverse the
4600 if (integer_zerop (arg1
))
4602 predict_insn_def (insn
, PRED_BUILTIN_EXPECT
, taken
);
4614 expand_builtin_trap (void)

4618     emit_insn (gen_trap ());
4621     emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);

4625 /* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4626    Return 0 if a normal call should be emitted rather than expanding
4627    the function inline.  If convenient, the result should be placed
4628    in TARGET.  SUBTARGET may be used as the target for computing ...  */

4632 expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)

4634   enum machine_mode mode;

4638   if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))

4641   arg = TREE_VALUE (arglist);
4642   mode = TYPE_MODE (TREE_TYPE (arg));
4643   op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4644   return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));

4647 /* Expand a call to copysign, copysignf, or copysignl with arguments ARGLIST.
4648    Return NULL if a normal call should be emitted rather than expanding the
4649    function inline.  If convenient, the result should be placed in TARGET.
4650    SUBTARGET may be used as the target for computing the operand.  */

4653 expand_builtin_copysign (tree arglist, rtx target, rtx subtarget)

4658   if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))

4661   arg = TREE_VALUE (arglist);
4662   op0 = expand_expr (arg, subtarget, VOIDmode, 0);

4664   arg = TREE_VALUE (TREE_CHAIN (arglist));
4665   op1 = expand_expr (arg, NULL, VOIDmode, 0);

4667   return expand_copysign (op0, op1, target);
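
/* Illustrative sketch, not part of GCC: what the expanded copysign
   computes, modeled with bit operations on a 32-bit IEEE-754 float.
   That representation is an assumption here; expand_copysign works on
   the target's actual floating-point format.  The helper name is
   hypothetical.  */

#include <string.h>
#include <stdint.h>

/* The magnitude of X combined with the sign of Y.  */
static float
copysignf_model (float x, float y)
{
  uint32_t xb, yb;
  memcpy (&xb, &x, sizeof xb);
  memcpy (&yb, &y, sizeof yb);
  xb = (xb & 0x7fffffffu) | (yb & 0x80000000u);
  memcpy (&x, &xb, sizeof x);
  return x;
}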
4670 /* Create a new constant string literal and return a char* pointer to it.
4671 The STRING_CST value is the LEN characters at STR. */
4673 build_string_literal (int len
, const char *str
)
4675 tree t
, elem
, index
, type
;
4677 t
= build_string (len
, str
);
4678 elem
= build_type_variant (char_type_node
, 1, 0);
4679 index
= build_index_type (build_int_cst (NULL_TREE
, len
- 1));
4680 type
= build_array_type (elem
, index
);
4681 TREE_TYPE (t
) = type
;
4682 TREE_CONSTANT (t
) = 1;
4683 TREE_INVARIANT (t
) = 1;
4684 TREE_READONLY (t
) = 1;
4685 TREE_STATIC (t
) = 1;
4687 type
= build_pointer_type (type
);
4688 t
= build1 (ADDR_EXPR
, type
, t
);
4690 type
= build_pointer_type (elem
);
4691 t
= build1 (NOP_EXPR
, type
, t
);
4695 /* Expand a call to printf or printf_unlocked with argument list ARGLIST.
4696    Return 0 if a normal call should be emitted rather than transforming
4697    the function inline.  If convenient, the result should be placed in
4698    TARGET with mode MODE.  UNLOCKED indicates this is a printf_unlocked ...  */

4701 expand_builtin_printf (tree arglist, rtx target, enum machine_mode mode,

4704   tree fn_putchar = unlocked
4705                     ? implicit_built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
4706                     : implicit_built_in_decls[BUILT_IN_PUTCHAR];
4707   tree fn_puts = unlocked ? implicit_built_in_decls[BUILT_IN_PUTS_UNLOCKED]
4708                           : implicit_built_in_decls[BUILT_IN_PUTS];
4709   const char *fmt_str;

4712   /* If the return value is used, don't do the transformation.  */
4713   if (target != const0_rtx)

4716   /* Verify the required arguments in the original call.  */
4719   fmt = TREE_VALUE (arglist);
4720   if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4722   arglist = TREE_CHAIN (arglist);

4724   /* Check whether the format is a literal string constant.  */
4725   fmt_str = c_getstr (fmt);
4726   if (fmt_str == NULL)

4729   /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
4730   if (strcmp (fmt_str, "%s\n") == 0)
4733       || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
4734       || TREE_CHAIN (arglist))

4738   /* If the format specifier was "%c", call __builtin_putchar(arg).  */
4739   else if (strcmp (fmt_str, "%c") == 0)
4742       || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE
4743       || TREE_CHAIN (arglist))

4749       /* We can't handle anything else with % args or %% ... yet.  */
4750       if (strchr (fmt_str, '%'))

4756       /* If the format specifier was "", printf does nothing.  */
4757       if (fmt_str[0] == '\0')
4759       /* If the format specifier has length of 1, call putchar.  */
4760       if (fmt_str[1] == '\0')

4762           /* Given printf ("c") (where c is any one character), convert
4763              "c"[0] to an int and pass that to the replacement ...  */
4765           arg = build_int_cst (NULL_TREE, fmt_str[0]);
4766           arglist = build_tree_list (NULL_TREE, arg);

4771           /* If the format specifier was "string\n", call puts ("string").  */
4772           size_t len = strlen (fmt_str);
4773           if (fmt_str[len - 1] == '\n')

4775               /* Create a NUL-terminated string that's one char shorter
4776                  than the original, stripping off the trailing '\n'.  */
4777               char *newstr = alloca (len);
4778               memcpy (newstr, fmt_str, len - 1);
4779               newstr[len - 1] = 0;

4781               arg = build_string_literal (len, newstr);
4782               arglist = build_tree_list (NULL_TREE, arg);

4786             /* We'd like to arrange to call fputs (string, stdout) here,
4787                but we need stdout and don't have a way to get it yet.  */

4794   return expand_expr (build_function_call_expr (fn, arglist),
4795                       target, mode, EXPAND_NORMAL);
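
/* Illustrative sketch, not part of GCC: the call-level rewrites the
   expander above performs when the printf return value is unused.  The
   puts call for "string\n" drops the trailing newline because puts
   appends one itself.  The function name below is hypothetical.  */

#include <stdio.h>

static void
printf_rewrites (const char *s, int c)
{
  printf ("%s\n", s);   /* becomes: puts (s);        */
  printf ("%c", c);     /* becomes: putchar (c);     */
  printf ("x");         /* becomes: putchar ('x');   */
  printf ("hello\n");   /* becomes: puts ("hello");  */
  printf ("");          /* becomes: nothing at all   */
}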
4798 /* Expand a call to fprintf or fprintf_unlocked with argument list ARGLIST.
4799 Return 0 if a normal call should be emitted rather than transforming
4800 the function inline. If convenient, the result should be placed in
4801 TARGET with mode MODE. UNLOCKED indicates this is a fprintf_unlocked
4804 expand_builtin_fprintf (tree arglist
, rtx target
, enum machine_mode mode
,
4807 tree fn_fputc
= unlocked
? implicit_built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
4808 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
4809 tree fn_fputs
= unlocked
? implicit_built_in_decls
[BUILT_IN_FPUTS_UNLOCKED
]
4810 : implicit_built_in_decls
[BUILT_IN_FPUTS
];
4811 const char *fmt_str
;
4812 tree fn
, fmt
, fp
, arg
;
4814 /* If the return value is used, don't do the transformation. */
4815 if (target
!= const0_rtx
)
4818 /* Verify the required arguments in the original call. */
4821 fp
= TREE_VALUE (arglist
);
4822 if (! POINTER_TYPE_P (TREE_TYPE (fp
)))
4824 arglist
= TREE_CHAIN (arglist
);
4827 fmt
= TREE_VALUE (arglist
);
4828 if (! POINTER_TYPE_P (TREE_TYPE (fmt
)))
4830 arglist
= TREE_CHAIN (arglist
);
4832 /* Check whether the format is a literal string constant. */
4833 fmt_str
= c_getstr (fmt
);
4834 if (fmt_str
== NULL
)
4837 /* If the format specifier was "%s", call __builtin_fputs(arg,fp). */
4838 if (strcmp (fmt_str
, "%s") == 0)
4841 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist
)))
4842 || TREE_CHAIN (arglist
))
4844 arg
= TREE_VALUE (arglist
);
4845 arglist
= build_tree_list (NULL_TREE
, fp
);
4846 arglist
= tree_cons (NULL_TREE
, arg
, arglist
);
4849 /* If the format specifier was "%c", call __builtin_fputc(arg,fp). */
4850 else if (strcmp (fmt_str
, "%c") == 0)
4853 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist
))) != INTEGER_TYPE
4854 || TREE_CHAIN (arglist
))
4856 arg
= TREE_VALUE (arglist
);
4857 arglist
= build_tree_list (NULL_TREE
, fp
);
4858 arglist
= tree_cons (NULL_TREE
, arg
, arglist
);
4863 /* We can't handle anything else with % args or %% ... yet. */
4864 if (strchr (fmt_str
, '%'))
4870 /* If the format specifier was "", fprintf does nothing. */
4871 if (fmt_str
[0] == '\0')
4873 /* Evaluate and ignore FILE* argument for side-effects. */
4874 expand_expr (fp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
4878 /* When "string" doesn't contain %, replace all cases of
4879 fprintf(stream,string) with fputs(string,stream). The fputs
4880 builtin will take care of special cases like length == 1. */
4881 arglist
= build_tree_list (NULL_TREE
, fp
);
4882 arglist
= tree_cons (NULL_TREE
, fmt
, arglist
);
4888 return expand_expr (build_function_call_expr (fn
, arglist
),
4889 target
, mode
, EXPAND_NORMAL
);
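
/* Illustrative sketch, not part of GCC: the corresponding fprintf
   rewrites performed above, again only valid when the return value is
   ignored.  The function name below is hypothetical.  */

#include <stdio.h>

static void
fprintf_rewrites (FILE *fp, const char *s, int c)
{
  fprintf (fp, "%s", s);        /* becomes: fputs (s, fp);              */
  fprintf (fp, "%c", c);        /* becomes: fputc (c, fp);              */
  fprintf (fp, "plain text");   /* becomes: fputs ("plain text", fp);   */
  fprintf (fp, "");             /* becomes: evaluate fp for side effects only */
}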
4892 /* Expand a call to sprintf with argument list ARGLIST.  Return 0 if
4893    a normal call should be emitted rather than expanding the function
4894    inline.  If convenient, the result should be placed in TARGET with ...  */

4898 expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)

4900   tree orig_arglist, dest, fmt;
4901   const char *fmt_str;

4903   orig_arglist = arglist;

4905   /* Verify the required arguments in the original call.  */
4908   dest = TREE_VALUE (arglist);
4909   if (! POINTER_TYPE_P (TREE_TYPE (dest)))
4911   arglist = TREE_CHAIN (arglist);

4914   fmt = TREE_VALUE (arglist);
4915   if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4917   arglist = TREE_CHAIN (arglist);

4919   /* Check whether the format is a literal string constant.  */
4920   fmt_str = c_getstr (fmt);
4921   if (fmt_str == NULL)

4924   /* If the format doesn't contain % args or %%, use strcpy.  */
4925   if (strchr (fmt_str, '%') == 0)

4927       tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
4930       if (arglist || ! fn)

4932       expand_expr (build_function_call_expr (fn, orig_arglist),
4933                    const0_rtx, VOIDmode, EXPAND_NORMAL);
4934       if (target == const0_rtx)
4936       exp = build_int_cst (NULL_TREE, strlen (fmt_str));
4937       return expand_expr (exp, target, mode, EXPAND_NORMAL);

4939   /* If the format is "%s", use strcpy if the result isn't used.  */
4940   else if (strcmp (fmt_str, "%s") == 0)

4943       fn = implicit_built_in_decls[BUILT_IN_STRCPY];

4948       if (! arglist || TREE_CHAIN (arglist))
4950       arg = TREE_VALUE (arglist);
4951       if (! POINTER_TYPE_P (TREE_TYPE (arg)))

4954       if (target != const0_rtx)

4956           len = c_strlen (arg, 1);
4957           if (! len || TREE_CODE (len) != INTEGER_CST)

4963       arglist = build_tree_list (NULL_TREE, arg);
4964       arglist = tree_cons (NULL_TREE, dest, arglist);
4965       expand_expr (build_function_call_expr (fn, arglist),
4966                    const0_rtx, VOIDmode, EXPAND_NORMAL);

4968       if (target == const0_rtx)
4970       return expand_expr (len, target, mode, EXPAND_NORMAL);
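
/* Illustrative sketch, not part of GCC: the sprintf rewrite above.  When
   the format contains no '%', the call becomes strcpy, and the length of
   the constant format stands in for sprintf's return value; "%s" with a
   constant-length argument is handled the same way.  The function name
   below is hypothetical.  */

#include <string.h>

/* sprintf (buf, "hello") behaves like this, with the length known at
   compile time.  */
static int
sprintf_constant_format (char *buf)
{
  strcpy (buf, "hello");
  return 5;   /* strlen ("hello") */
}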
4976 /* Expand a call to either the entry or exit function profiler. */
4979 expand_builtin_profile_func (bool exitp
)
4983 this = DECL_RTL (current_function_decl
);
4984 gcc_assert (MEM_P (this));
4985 this = XEXP (this, 0);
4988 which
= profile_function_exit_libfunc
;
4990 which
= profile_function_entry_libfunc
;
4992 emit_library_call (which
, LCT_NORMAL
, VOIDmode
, 2, this, Pmode
,
4993 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS
,
5000 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

5003 round_trampoline_addr (rtx tramp)

5005   rtx temp, addend, mask;

5007   /* If we don't need too much alignment, we'll have been guaranteed
5008      proper alignment by get_trampoline_type.  */
5009   if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)

5012   /* Round address up to desired boundary.  */
5013   temp = gen_reg_rtx (Pmode);
5014   addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5015   mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);

5017   temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5018                               temp, 0, OPTAB_LIB_WIDEN);
5019   tramp = expand_simple_binop (Pmode, AND, temp, mask,
5020                                temp, 0, OPTAB_LIB_WIDEN);
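
/* Illustrative sketch, not part of GCC: the round-up computation emitted
   above, in plain C.  Assumes ALIGN is a power of two (as trampoline
   alignments are), so ~(align - 1) equals the -align mask used above.
   The helper name is hypothetical.  */

#include <stdint.h>

/* (addr + align - 1) & -align rounds ADDR up to the next multiple of
   ALIGN, which is what the PLUS and AND binops compute.  */
static uintptr_t
round_up_to_alignment (uintptr_t addr, uintptr_t align)
{
  return (addr + align - 1) & ~(align - 1);
}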
5026 expand_builtin_init_trampoline (tree arglist
)
5028 tree t_tramp
, t_func
, t_chain
;
5029 rtx r_tramp
, r_func
, r_chain
;
5030 #ifdef TRAMPOLINE_TEMPLATE
5034 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
,
5035 POINTER_TYPE
, VOID_TYPE
))
5038 t_tramp
= TREE_VALUE (arglist
);
5039 arglist
= TREE_CHAIN (arglist
);
5040 t_func
= TREE_VALUE (arglist
);
5041 arglist
= TREE_CHAIN (arglist
);
5042 t_chain
= TREE_VALUE (arglist
);
5044 r_tramp
= expand_expr (t_tramp
, NULL_RTX
, VOIDmode
, 0);
5045 r_func
= expand_expr (t_func
, NULL_RTX
, VOIDmode
, 0);
5046 r_chain
= expand_expr (t_chain
, NULL_RTX
, VOIDmode
, 0);
5048 /* Generate insns to initialize the trampoline. */
5049 r_tramp
= round_trampoline_addr (r_tramp
);
5050 #ifdef TRAMPOLINE_TEMPLATE
5051 blktramp
= gen_rtx_MEM (BLKmode
, r_tramp
);
5052 set_mem_align (blktramp
, TRAMPOLINE_ALIGNMENT
);
5053 emit_block_move (blktramp
, assemble_trampoline_template (),
5054 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
5056 trampolines_created
= 1;
5057 INITIALIZE_TRAMPOLINE (r_tramp
, r_func
, r_chain
);
5063 expand_builtin_adjust_trampoline (tree arglist
)
5067 if (!validate_arglist (arglist
, POINTER_TYPE
, VOID_TYPE
))
5070 tramp
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
5071 tramp
= round_trampoline_addr (tramp
);
5072 #ifdef TRAMPOLINE_ADJUST_ADDRESS
5073 TRAMPOLINE_ADJUST_ADDRESS (tramp
);
5079 /* Expand a call to the built-in signbit, signbitf or signbitl function.
5080 Return NULL_RTX if a normal call should be emitted rather than expanding
5081 the function in-line. EXP is the expression that is a call to the builtin
5082 function; if convenient, the result should be placed in TARGET. */
5085 expand_builtin_signbit (tree exp
, rtx target
)
5087 const struct real_format
*fmt
;
5088 enum machine_mode fmode
, imode
, rmode
;
5089 HOST_WIDE_INT hi
, lo
;
5094 arglist
= TREE_OPERAND (exp
, 1);
5095 if (!validate_arglist (arglist
, REAL_TYPE
, VOID_TYPE
))
5098 arg
= TREE_VALUE (arglist
);
5099 fmode
= TYPE_MODE (TREE_TYPE (arg
));
5100 rmode
= TYPE_MODE (TREE_TYPE (exp
));
5101 fmt
= REAL_MODE_FORMAT (fmode
);
5103 /* For floating point formats without a sign bit, implement signbit
5105 bitpos
= fmt
->signbit_ro
;
5108 /* But we can't do this if the format supports signed zero. */
5109 if (fmt
->has_signed_zero
&& HONOR_SIGNED_ZEROS (fmode
))
5112 arg
= fold (build2 (LT_EXPR
, TREE_TYPE (exp
), arg
,
5113 build_real (TREE_TYPE (arg
), dconst0
)));
5114 return expand_expr (arg
, target
, VOIDmode
, EXPAND_NORMAL
);
5117 temp
= expand_expr (arg
, NULL_RTX
, VOIDmode
, 0);
5118 if (GET_MODE_SIZE (fmode
) <= UNITS_PER_WORD
)
5120 imode
= int_mode_for_mode (fmode
);
5121 if (imode
== BLKmode
)
5123 temp
= gen_lowpart (imode
, temp
);
5128 /* Handle targets with different FP word orders. */
5129 if (FLOAT_WORDS_BIG_ENDIAN
)
5130 word
= (GET_MODE_BITSIZE (fmode
) - bitpos
) / BITS_PER_WORD
;
5132 word
= bitpos
/ BITS_PER_WORD
;
5133 temp
= operand_subword_force (temp
, word
, fmode
);
5134 bitpos
= bitpos
% BITS_PER_WORD
;
5137 /* Force the intermediate word_mode (or narrower) result into a
5138 register. This avoids attempting to create paradoxical SUBREGs
5139 of floating point modes below. */
5140 temp
= force_reg (imode
, temp
);
5142 /* If the bitpos is within the "result mode" lowpart, the operation
5143      can be implemented with a single bitwise AND.  Otherwise, we need
5144 a right shift and an AND. */
5146 if (bitpos
< GET_MODE_BITSIZE (rmode
))
5148 if (bitpos
< HOST_BITS_PER_WIDE_INT
)
5151 lo
= (HOST_WIDE_INT
) 1 << bitpos
;
5155 hi
= (HOST_WIDE_INT
) 1 << (bitpos
- HOST_BITS_PER_WIDE_INT
);
5160 temp
= gen_lowpart (rmode
, temp
);
5161 temp
= expand_binop (rmode
, and_optab
, temp
,
5162 immed_double_const (lo
, hi
, rmode
),
5163 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5167 /* Perform a logical right shift to place the signbit in the least
5168 significant bit, then truncate the result to the desired mode
5169 and mask just this bit. */
5170 temp
= expand_shift (RSHIFT_EXPR
, imode
, temp
,
5171 build_int_cst (NULL_TREE
, bitpos
), NULL_RTX
, 1);
5172 temp
= gen_lowpart (rmode
, temp
);
5173 temp
= expand_binop (rmode
, and_optab
, temp
, const1_rtx
,
5174 NULL_RTX
, 1, OPTAB_LIB_WIDEN
);
5180 /* Expand fork or exec calls. TARGET is the desired target of the
5181 call. ARGLIST is the list of arguments of the call. FN is the
5182    identifier of the actual function.  IGNORE is nonzero if the
5183 value is to be ignored. */
5186 expand_builtin_fork_or_exec (tree fn
, tree arglist
, rtx target
, int ignore
)
5191 /* If we are not profiling, just call the function. */
5192 if (!profile_arc_flag
)
5195 /* Otherwise call the wrapper. This should be equivalent for the rest of
5196 compiler, so the code does not diverge, and the wrapper may run the
5197 code necessary for keeping the profiling sane. */
5199 switch (DECL_FUNCTION_CODE (fn
))
5202 id
= get_identifier ("__gcov_fork");
5205 case BUILT_IN_EXECL
:
5206 id
= get_identifier ("__gcov_execl");
5209 case BUILT_IN_EXECV
:
5210 id
= get_identifier ("__gcov_execv");
5213 case BUILT_IN_EXECLP
:
5214 id
= get_identifier ("__gcov_execlp");
5217 case BUILT_IN_EXECLE
:
5218 id
= get_identifier ("__gcov_execle");
5221 case BUILT_IN_EXECVP
:
5222 id
= get_identifier ("__gcov_execvp");
5225 case BUILT_IN_EXECVE
:
5226 id
= get_identifier ("__gcov_execve");
5233 decl
= build_decl (FUNCTION_DECL
, id
, TREE_TYPE (fn
));
5234 DECL_EXTERNAL (decl
) = 1;
5235 TREE_PUBLIC (decl
) = 1;
5236 DECL_ARTIFICIAL (decl
) = 1;
5237 TREE_NOTHROW (decl
) = 1;
5238 call
= build_function_call_expr (decl
, arglist
);
5240 return expand_call (call
, target
, ignore
);
5244 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5245    ARGLIST is the operands list to the function.  CODE is the rtx code
5246    that corresponds to the arithmetic or logical operation from the name;
5247    an exception here is that NOT actually means NAND.  TARGET is an optional
5248    place for us to store the results; AFTER is true if this is the
5249    xxx_and_fetch form, i.e. the value after the operation is wanted.
5250    IGNORE is true if we don't actually care about the result of the operation at all.  */

5253 expand_builtin_sync_operation (tree arglist, enum rtx_code code, bool after,
5254                                rtx target, bool ignore)

5256   enum machine_mode mode;

5260   /* Expand the operands.  */
5261   addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_SUM);
5262   mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));

5264   arglist = TREE_CHAIN (arglist);
5265   valt = TREE_VALUE (arglist);
5266   if (code == NOT && TREE_CONSTANT (valt))
5268       valt = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (valt), valt);
5271   val = expand_expr (valt, NULL, mode, EXPAND_NORMAL);

5273   /* Note that we explicitly do not want any alias information for this
5274      memory, so that we kill all other live memories.  Otherwise we don't
5275      satisfy the full barrier semantics of the intrinsic.  */
5276   mem = validize_mem (gen_rtx_MEM (mode, addr));
5277   MEM_VOLATILE_P (mem) = 1;

5280     return expand_sync_operation (mem, val, code);
5282     return expand_sync_fetch_operation (mem, val, code, after, target);
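
/* Illustrative sketch, not part of GCC: the difference between the two
   forms expanded above, written as single-threaded C models.  The real
   builtins perform the read-modify-write atomically and imply a full
   barrier; the function names here are hypothetical.  */

/* __sync_fetch_and_add: the value before the operation is returned.  */
static int
fetch_and_add_model (int *p, int v)
{
  int old = *p;
  *p = old + v;
  return old;
}

/* __sync_add_and_fetch: the value after the operation is returned.  */
static int
add_and_fetch_model (int *p, int v)
{
  *p = *p + v;
  return *p;
}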
5285 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5286    intrinsics.  ARGLIST is the operands list to the function.  IS_BOOL is
5287    true if this is the boolean form.  TARGET is a place for us to store the
5288    results; this is NOT optional if IS_BOOL is true.  */

5291 expand_builtin_compare_and_swap (tree arglist, bool is_bool, rtx target)

5293   enum machine_mode mode;
5294   rtx addr, old_val, new_val, mem;

5296   /* Expand the operands.  */
5297   addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_SUM);
5298   mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));

5300   arglist = TREE_CHAIN (arglist);
5301   old_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);

5303   arglist = TREE_CHAIN (arglist);
5304   new_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);

5306   /* Note that we explicitly do not want any alias information for this
5307      memory, so that we kill all other live memories.  Otherwise we don't
5308      satisfy the full barrier semantics of the intrinsic.  */
5309   mem = validize_mem (gen_rtx_MEM (mode, addr));
5310   MEM_VOLATILE_P (mem) = 1;

5313     return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5315     return expand_val_compare_and_swap (mem, old_val, new_val, target);
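
/* Illustrative sketch, not part of GCC: single-threaded models of the
   two compare-and-swap forms expanded above.  The real builtins perform
   the whole sequence atomically; the function names are hypothetical.  */

#include <stdbool.h>

/* The 'val' form returns the previous contents of *P.  */
static int
val_compare_and_swap_model (int *p, int oldval, int newval)
{
  int prev = *p;
  if (prev == oldval)
    *p = newval;
  return prev;
}

/* The 'bool' form reports whether the store happened.  */
static bool
bool_compare_and_swap_model (int *p, int oldval, int newval)
{
  if (*p != oldval)
    return false;
  *p = newval;
  return true;
}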
5318 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
5319    general form is actually an atomic exchange, and some targets only
5320    support a reduced form with the second argument being a constant 1.
5321    ARGLIST is the operands list to the function; TARGET is an optional
5322    place for us to store the results.  */

5325 expand_builtin_lock_test_and_set (tree arglist, rtx target)

5327   enum machine_mode mode;

5330   /* Expand the operands.  */
5331   addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_NORMAL);
5332   mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));

5334   arglist = TREE_CHAIN (arglist);
5335   val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);

5337   /* Note that we explicitly do not want any alias information for this
5338      memory, so that we kill all other live memories.  Otherwise we don't
5339      satisfy the barrier semantics of the intrinsic.  */
5340   mem = validize_mem (gen_rtx_MEM (mode, addr));
5341   MEM_VOLATILE_P (mem) = 1;

5343   return expand_sync_lock_test_and_set (mem, val, target);
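
/* Illustrative sketch, not part of GCC: a single-threaded model of
   __sync_lock_test_and_set, which in its general form is an atomic
   exchange (some targets only accept the constant 1, as noted above).
   The function name is hypothetical.  */

/* Store VAL and return the previous contents.  */
static int
lock_test_and_set_model (int *p, int val)
{
  int old = *p;
  *p = val;
  return old;
}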
5346 /* Expand the __sync_synchronize intrinsic.  */

5349 expand_builtin_synchronize (void)

5353 #ifdef HAVE_memory_barrier
5354   if (HAVE_memory_barrier)
5356       emit_insn (gen_memory_barrier ());

5361   /* If no explicit memory barrier instruction is available, create an empty
5362      asm stmt that will prevent compiler movement across the barrier.  */
5363   body = gen_rtx_ASM_INPUT (VOIDmode, "");
5364   MEM_VOLATILE_P (body) = 1;
5368 /* Expand the __sync_lock_release intrinsic.  ARGLIST is the operands list ...  */

5372 expand_builtin_lock_release (tree arglist)

5374   enum machine_mode mode;
5375   enum insn_code icode;
5376   rtx addr, val, mem, insn;

5378   /* Expand the operands.  */
5379   addr = expand_expr (TREE_VALUE (arglist), NULL, Pmode, EXPAND_NORMAL);
5380   mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_VALUE (arglist))));

5383   /* Note that we explicitly do not want any alias information for this
5384      memory, so that we kill all other live memories.  Otherwise we don't
5385      satisfy the barrier semantics of the intrinsic.  */
5386   mem = validize_mem (gen_rtx_MEM (mode, addr));
5387   MEM_VOLATILE_P (mem) = 1;

5389   /* If there is an explicit operation in the md file, use it.  */
5390   icode = sync_lock_release[mode];
5391   if (icode != CODE_FOR_nothing)

5393       if (!insn_data[icode].operand[1].predicate (val, mode))
5394         val = force_reg (mode, val);

5396       insn = GEN_FCN (icode) (mem, val);

5404   /* Otherwise we can implement this operation by emitting a barrier
5405      followed by a store of zero.  */
5406   expand_builtin_synchronize ();
5407   emit_move_insn (mem, val);
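
/* Illustrative sketch, not part of GCC: the fallback path above in C
   terms.  When no sync_lock_release pattern exists, the expander issues
   a full barrier and then stores zero; that VAL is const0_rtx at that
   point is an assumption of this sketch.  The function name is
   hypothetical.  */

/* Barrier, then clear the lock word.  */
static void
lock_release_model (volatile int *lock)
{
  __sync_synchronize ();   /* the barrier emitted by expand_builtin_synchronize */
  *lock = 0;               /* the store of zero */
}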
5410 /* Expand an expression EXP that calls a built-in function,
5411 with result going to TARGET if that's convenient
5412 (and in mode MODE if that's convenient).
5413 SUBTARGET may be used as the target for computing one of EXP's operands.
5414 IGNORE is nonzero if the value is to be ignored. */
5417 expand_builtin (tree exp
, rtx target
, rtx subtarget
, enum machine_mode mode
,
5420 tree fndecl
= get_callee_fndecl (exp
);
5421 tree arglist
= TREE_OPERAND (exp
, 1);
5422 enum built_in_function fcode
= DECL_FUNCTION_CODE (fndecl
);
5423 enum machine_mode target_mode
= TYPE_MODE (TREE_TYPE (exp
));
5425 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_MD
)
5426 return targetm
.expand_builtin (exp
, target
, subtarget
, mode
, ignore
);
5428 /* When not optimizing, generate calls to library functions for a certain
5431 && !called_as_built_in (fndecl
)
5432 && DECL_ASSEMBLER_NAME_SET_P (fndecl
)
5433 && fcode
!= BUILT_IN_ALLOCA
)
5434 return expand_call (exp
, target
, ignore
);
5436 /* The built-in function expanders test for target == const0_rtx
5437 to determine whether the function's result will be ignored. */
5439 target
= const0_rtx
;
5441 /* If the result of a pure or const built-in function is ignored, and
5442 none of its arguments are volatile, we can avoid expanding the
5443 built-in call and just evaluate the arguments for side-effects. */
5444 if (target
== const0_rtx
5445 && (DECL_IS_PURE (fndecl
) || TREE_READONLY (fndecl
)))
5447 bool volatilep
= false;
5450 for (arg
= arglist
; arg
; arg
= TREE_CHAIN (arg
))
5451 if (TREE_THIS_VOLATILE (TREE_VALUE (arg
)))
5459 for (arg
= arglist
; arg
; arg
= TREE_CHAIN (arg
))
5460 expand_expr (TREE_VALUE (arg
), const0_rtx
,
5461 VOIDmode
, EXPAND_NORMAL
);
5469 case BUILT_IN_FABSF
:
5470 case BUILT_IN_FABSL
:
5471 target
= expand_builtin_fabs (arglist
, target
, subtarget
);
5476 case BUILT_IN_COPYSIGN
:
5477 case BUILT_IN_COPYSIGNF
:
5478 case BUILT_IN_COPYSIGNL
:
5479 target
= expand_builtin_copysign (arglist
, target
, subtarget
);
5484 /* Just do a normal library call if we were unable to fold
5487 case BUILT_IN_CABSF
:
5488 case BUILT_IN_CABSL
:
5494 case BUILT_IN_EXP10
:
5495 case BUILT_IN_EXP10F
:
5496 case BUILT_IN_EXP10L
:
5497 case BUILT_IN_POW10
:
5498 case BUILT_IN_POW10F
:
5499 case BUILT_IN_POW10L
:
5501 case BUILT_IN_EXP2F
:
5502 case BUILT_IN_EXP2L
:
5503 case BUILT_IN_EXPM1
:
5504 case BUILT_IN_EXPM1F
:
5505 case BUILT_IN_EXPM1L
:
5507 case BUILT_IN_LOGBF
:
5508 case BUILT_IN_LOGBL
:
5509 case BUILT_IN_ILOGB
:
5510 case BUILT_IN_ILOGBF
:
5511 case BUILT_IN_ILOGBL
:
5515 case BUILT_IN_LOG10
:
5516 case BUILT_IN_LOG10F
:
5517 case BUILT_IN_LOG10L
:
5519 case BUILT_IN_LOG2F
:
5520 case BUILT_IN_LOG2L
:
5521 case BUILT_IN_LOG1P
:
5522 case BUILT_IN_LOG1PF
:
5523 case BUILT_IN_LOG1PL
:
5528 case BUILT_IN_ASINF
:
5529 case BUILT_IN_ASINL
:
5531 case BUILT_IN_ACOSF
:
5532 case BUILT_IN_ACOSL
:
5534 case BUILT_IN_ATANF
:
5535 case BUILT_IN_ATANL
:
5536 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5537 because of possible accuracy problems. */
5538 if (! flag_unsafe_math_optimizations
)
5541 case BUILT_IN_SQRTF
:
5542 case BUILT_IN_SQRTL
:
5543 case BUILT_IN_FLOOR
:
5544 case BUILT_IN_FLOORF
:
5545 case BUILT_IN_FLOORL
:
5547 case BUILT_IN_CEILF
:
5548 case BUILT_IN_CEILL
:
5549 case BUILT_IN_TRUNC
:
5550 case BUILT_IN_TRUNCF
:
5551 case BUILT_IN_TRUNCL
:
5552 case BUILT_IN_ROUND
:
5553 case BUILT_IN_ROUNDF
:
5554 case BUILT_IN_ROUNDL
:
5555 case BUILT_IN_NEARBYINT
:
5556 case BUILT_IN_NEARBYINTF
:
5557 case BUILT_IN_NEARBYINTL
:
5559 case BUILT_IN_RINTF
:
5560 case BUILT_IN_RINTL
:
5561 case BUILT_IN_LRINT
:
5562 case BUILT_IN_LRINTF
:
5563 case BUILT_IN_LRINTL
:
5564 case BUILT_IN_LLRINT
:
5565 case BUILT_IN_LLRINTF
:
5566 case BUILT_IN_LLRINTL
:
5567 target
= expand_builtin_mathfn (exp
, target
, subtarget
);
5572 case BUILT_IN_LCEIL
:
5573 case BUILT_IN_LCEILF
:
5574 case BUILT_IN_LCEILL
:
5575 case BUILT_IN_LLCEIL
:
5576 case BUILT_IN_LLCEILF
:
5577 case BUILT_IN_LLCEILL
:
5578 case BUILT_IN_LFLOOR
:
5579 case BUILT_IN_LFLOORF
:
5580 case BUILT_IN_LFLOORL
:
5581 case BUILT_IN_LLFLOOR
:
5582 case BUILT_IN_LLFLOORF
:
5583 case BUILT_IN_LLFLOORL
:
5584 target
= expand_builtin_int_roundingfn (exp
, target
, subtarget
);
5592 target
= expand_builtin_pow (exp
, target
, subtarget
);
5598 case BUILT_IN_POWIF
:
5599 case BUILT_IN_POWIL
:
5600 target
= expand_builtin_powi (exp
, target
, subtarget
);
5605 case BUILT_IN_ATAN2
:
5606 case BUILT_IN_ATAN2F
:
5607 case BUILT_IN_ATAN2L
:
5608 case BUILT_IN_LDEXP
:
5609 case BUILT_IN_LDEXPF
:
5610 case BUILT_IN_LDEXPL
:
5612 case BUILT_IN_FMODF
:
5613 case BUILT_IN_FMODL
:
5615 case BUILT_IN_DREMF
:
5616 case BUILT_IN_DREML
:
5617 if (! flag_unsafe_math_optimizations
)
5619 target
= expand_builtin_mathfn_2 (exp
, target
, subtarget
);
5630 if (! flag_unsafe_math_optimizations
)
5632 target
= expand_builtin_mathfn_3 (exp
, target
, subtarget
);
5637 case BUILT_IN_APPLY_ARGS
:
5638 return expand_builtin_apply_args ();
5640 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5641 FUNCTION with a copy of the parameters described by
5642 ARGUMENTS, and ARGSIZE. It returns a block of memory
5643 allocated on the stack into which is stored all the registers
5644 that might possibly be used for returning the result of a
5645 function. ARGUMENTS is the value returned by
5646 __builtin_apply_args. ARGSIZE is the number of bytes of
5647 arguments that must be copied. ??? How should this value be
5648 computed? We'll also need a safe worst case value for varargs
5650 case BUILT_IN_APPLY
:
5651 if (!validate_arglist (arglist
, POINTER_TYPE
,
5652 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
)
5653 && !validate_arglist (arglist
, REFERENCE_TYPE
,
5654 POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
5662 for (t
= arglist
, i
= 0; t
; t
= TREE_CHAIN (t
), i
++)
5663 ops
[i
] = expand_expr (TREE_VALUE (t
), NULL_RTX
, VOIDmode
, 0);
5665 return expand_builtin_apply (ops
[0], ops
[1], ops
[2]);
5668 /* __builtin_return (RESULT) causes the function to return the
5669 value described by RESULT. RESULT is address of the block of
5670 memory returned by __builtin_apply. */
5671 case BUILT_IN_RETURN
:
5672 if (validate_arglist (arglist
, POINTER_TYPE
, VOID_TYPE
))
5673 expand_builtin_return (expand_expr (TREE_VALUE (arglist
),
5674 NULL_RTX
, VOIDmode
, 0));
5677 case BUILT_IN_SAVEREGS
:
5678 return expand_builtin_saveregs ();
5680 case BUILT_IN_ARGS_INFO
:
5681 return expand_builtin_args_info (arglist
);
5683 /* Return the address of the first anonymous stack arg. */
5684 case BUILT_IN_NEXT_ARG
:
5685 if (fold_builtin_next_arg (arglist
))
5687 return expand_builtin_next_arg ();
5689 case BUILT_IN_CLASSIFY_TYPE
:
5690 return expand_builtin_classify_type (arglist
);
5692 case BUILT_IN_CONSTANT_P
:
5695 case BUILT_IN_FRAME_ADDRESS
:
5696 case BUILT_IN_RETURN_ADDRESS
:
5697 return expand_builtin_frame_address (fndecl
, arglist
);
5699 /* Returns the address of the area where the structure is returned.
5701 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS
:
5703 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
)))
5704 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl
))))
5707 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl
)), 0);
5709 case BUILT_IN_ALLOCA
:
5710 target
= expand_builtin_alloca (arglist
, target
);
5715 case BUILT_IN_STACK_SAVE
:
5716 return expand_stack_save ();
5718 case BUILT_IN_STACK_RESTORE
:
5719 expand_stack_restore (TREE_VALUE (arglist
));
5724 case BUILT_IN_FFSLL
:
5725 case BUILT_IN_FFSIMAX
:
5726 target
= expand_builtin_unop (target_mode
, arglist
, target
,
5727 subtarget
, ffs_optab
);
5734 case BUILT_IN_CLZLL
:
5735 case BUILT_IN_CLZIMAX
:
5736 target
= expand_builtin_unop (target_mode
, arglist
, target
,
5737 subtarget
, clz_optab
);
5744 case BUILT_IN_CTZLL
:
5745 case BUILT_IN_CTZIMAX
:
5746 target
= expand_builtin_unop (target_mode
, arglist
, target
,
5747 subtarget
, ctz_optab
);
5752 case BUILT_IN_POPCOUNT
:
5753 case BUILT_IN_POPCOUNTL
:
5754 case BUILT_IN_POPCOUNTLL
:
5755 case BUILT_IN_POPCOUNTIMAX
:
5756 target
= expand_builtin_unop (target_mode
, arglist
, target
,
5757 subtarget
, popcount_optab
);
5762 case BUILT_IN_PARITY
:
5763 case BUILT_IN_PARITYL
:
5764 case BUILT_IN_PARITYLL
:
5765 case BUILT_IN_PARITYIMAX
:
5766 target
= expand_builtin_unop (target_mode
, arglist
, target
,
5767 subtarget
, parity_optab
);
5772 case BUILT_IN_STRLEN
:
5773 target
= expand_builtin_strlen (arglist
, target
, target_mode
);
5778 case BUILT_IN_STRCPY
:
5779 target
= expand_builtin_strcpy (exp
, target
, mode
);
5784 case BUILT_IN_STRNCPY
:
5785 target
= expand_builtin_strncpy (exp
, target
, mode
);
5790 case BUILT_IN_STPCPY
:
5791 target
= expand_builtin_stpcpy (exp
, target
, mode
);
5796 case BUILT_IN_STRCAT
:
5797 target
= expand_builtin_strcat (arglist
, TREE_TYPE (exp
), target
, mode
);
5802 case BUILT_IN_STRNCAT
:
5803 target
= expand_builtin_strncat (arglist
, target
, mode
);
5808 case BUILT_IN_STRSPN
:
5809 target
= expand_builtin_strspn (arglist
, target
, mode
);
5814 case BUILT_IN_STRCSPN
:
5815 target
= expand_builtin_strcspn (arglist
, target
, mode
);
5820 case BUILT_IN_STRSTR
:
5821 target
= expand_builtin_strstr (arglist
, TREE_TYPE (exp
), target
, mode
);
5826 case BUILT_IN_STRPBRK
:
5827 target
= expand_builtin_strpbrk (arglist
, TREE_TYPE (exp
), target
, mode
);
5832 case BUILT_IN_INDEX
:
5833 case BUILT_IN_STRCHR
:
5834 target
= expand_builtin_strchr (arglist
, TREE_TYPE (exp
), target
, mode
);
5839 case BUILT_IN_RINDEX
:
5840 case BUILT_IN_STRRCHR
:
5841 target
= expand_builtin_strrchr (arglist
, TREE_TYPE (exp
), target
, mode
);
5846 case BUILT_IN_MEMCPY
:
5847 target
= expand_builtin_memcpy (exp
, target
, mode
);
5852 case BUILT_IN_MEMPCPY
:
5853 target
= expand_builtin_mempcpy (arglist
, TREE_TYPE (exp
), target
, mode
, /*endp=*/ 1);
5858 case BUILT_IN_MEMMOVE
:
5859 target
= expand_builtin_memmove (arglist, TREE_TYPE (exp), target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BCOPY:
      target = expand_builtin_bcopy (arglist, TREE_TYPE (exp));
      if (target)
        return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (arglist);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_SETJMP:
      target = expand_builtin_setjmp (arglist, target);
      if (target)
        return target;
      break;

      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        break;
      else
        {
          rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
                                      VOIDmode, 0);
          rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
                                   NULL_RTX, VOIDmode, 0);

          if (value != const1_rtx)
            {
              error ("%<__builtin_longjmp%> second argument must be 1");
              return const0_rtx;
            }

          expand_builtin_longjmp (buf_addr, value);
          return const0_rtx;
        }

    case BUILT_IN_NONLOCAL_GOTO:
      target = expand_builtin_nonlocal_goto (arglist);
      if (target)
        return target;
      break;

      /* This updates the setjmp buffer that is its argument with the value
         of the current stack pointer.  */
    case BUILT_IN_UPDATE_SETJMP_BUF:
      if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
        {
          rtx buf_addr
            = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

          expand_builtin_update_setjmp_buf (buf_addr);
        }
      break;

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_PRINTF:
      target = expand_builtin_printf (arglist, target, mode, false);
      if (target)
        return target;
      break;

    case BUILT_IN_PRINTF_UNLOCKED:
      target = expand_builtin_printf (arglist, target, mode, true);
      if (target)
        return target;
      break;

    case BUILT_IN_FPUTS:
      target = expand_builtin_fputs (arglist, target, false);
      if (target)
        return target;
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      target = expand_builtin_fputs (arglist, target, true);
      if (target)
        return target;
      break;

    case BUILT_IN_FPRINTF:
      target = expand_builtin_fprintf (arglist, target, mode, false);
      if (target)
        return target;
      break;

    case BUILT_IN_FPRINTF_UNLOCKED:
      target = expand_builtin_fprintf (arglist, target, mode, true);
      if (target)
        return target;
      break;

    case BUILT_IN_SPRINTF:
      target = expand_builtin_sprintf (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_SIGNBIT:
    case BUILT_IN_SIGNBITF:
    case BUILT_IN_SIGNBITL:
      target = expand_builtin_signbit (exp, target);
      if (target)
        return target;
      break;

      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (TREE_VALUE (arglist),
                                TREE_VALUE (TREE_CHAIN (arglist)));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (arglist);
#endif
    case BUILT_IN_EXTEND_POINTER:
      return expand_builtin_extend_pointer (TREE_VALUE (arglist));

    case BUILT_IN_VA_START:
    case BUILT_IN_STDARG_START:
      return expand_builtin_va_start (arglist);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (arglist);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (arglist);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (arglist, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (arglist);
      return const0_rtx;

    case BUILT_IN_PROFILE_FUNC_ENTER:
      return expand_builtin_profile_func (false);
    case BUILT_IN_PROFILE_FUNC_EXIT:
      return expand_builtin_profile_func (true);

    case BUILT_IN_INIT_TRAMPOLINE:
      return expand_builtin_init_trampoline (arglist);
    case BUILT_IN_ADJUST_TRAMPOLINE:
      return expand_builtin_adjust_trampoline (arglist);

    case BUILT_IN_EXECL:
    case BUILT_IN_EXECV:
    case BUILT_IN_EXECLP:
    case BUILT_IN_EXECLE:
    case BUILT_IN_EXECVP:
    case BUILT_IN_EXECVE:
      target = expand_builtin_fork_or_exec (fndecl, arglist, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_FETCH_AND_ADD_1: case BUILT_IN_FETCH_AND_ADD_2:
    case BUILT_IN_FETCH_AND_ADD_4: case BUILT_IN_FETCH_AND_ADD_8:
      target = expand_builtin_sync_operation (arglist, PLUS,
                                              false, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_FETCH_AND_SUB_1: case BUILT_IN_FETCH_AND_SUB_2:
    case BUILT_IN_FETCH_AND_SUB_4: case BUILT_IN_FETCH_AND_SUB_8:
      target = expand_builtin_sync_operation (arglist, MINUS,
                                              false, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_FETCH_AND_OR_1: case BUILT_IN_FETCH_AND_OR_2:
    case BUILT_IN_FETCH_AND_OR_4: case BUILT_IN_FETCH_AND_OR_8:
      target = expand_builtin_sync_operation (arglist, IOR,
                                              false, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_FETCH_AND_AND_1: case BUILT_IN_FETCH_AND_AND_2:
    case BUILT_IN_FETCH_AND_AND_4: case BUILT_IN_FETCH_AND_AND_8:
      target = expand_builtin_sync_operation (arglist, AND,
                                              false, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_FETCH_AND_XOR_1: case BUILT_IN_FETCH_AND_XOR_2:
    case BUILT_IN_FETCH_AND_XOR_4: case BUILT_IN_FETCH_AND_XOR_8:
      target = expand_builtin_sync_operation (arglist, XOR,
                                              false, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_FETCH_AND_NAND_1: case BUILT_IN_FETCH_AND_NAND_2:
    case BUILT_IN_FETCH_AND_NAND_4: case BUILT_IN_FETCH_AND_NAND_8:
      target = expand_builtin_sync_operation (arglist, NOT,
                                              false, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_ADD_AND_FETCH_1: case BUILT_IN_ADD_AND_FETCH_2:
    case BUILT_IN_ADD_AND_FETCH_4: case BUILT_IN_ADD_AND_FETCH_8:
      target = expand_builtin_sync_operation (arglist, PLUS,
                                              true, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_SUB_AND_FETCH_1: case BUILT_IN_SUB_AND_FETCH_2:
    case BUILT_IN_SUB_AND_FETCH_4: case BUILT_IN_SUB_AND_FETCH_8:
      target = expand_builtin_sync_operation (arglist, MINUS,
                                              true, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_OR_AND_FETCH_1: case BUILT_IN_OR_AND_FETCH_2:
    case BUILT_IN_OR_AND_FETCH_4: case BUILT_IN_OR_AND_FETCH_8:
      target = expand_builtin_sync_operation (arglist, IOR,
                                              true, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_AND_AND_FETCH_1: case BUILT_IN_AND_AND_FETCH_2:
    case BUILT_IN_AND_AND_FETCH_4: case BUILT_IN_AND_AND_FETCH_8:
      target = expand_builtin_sync_operation (arglist, AND,
                                              true, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_XOR_AND_FETCH_1: case BUILT_IN_XOR_AND_FETCH_2:
    case BUILT_IN_XOR_AND_FETCH_4: case BUILT_IN_XOR_AND_FETCH_8:
      target = expand_builtin_sync_operation (arglist, XOR,
                                              true, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_NAND_AND_FETCH_1: case BUILT_IN_NAND_AND_FETCH_2:
    case BUILT_IN_NAND_AND_FETCH_4: case BUILT_IN_NAND_AND_FETCH_8:
      target = expand_builtin_sync_operation (arglist, NOT,
                                              true, target, ignore);
      if (target)
        return target;
      break;

    case BUILT_IN_BOOL_COMPARE_AND_SWAP_1: case BUILT_IN_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_BOOL_COMPARE_AND_SWAP_4: case BUILT_IN_BOOL_COMPARE_AND_SWAP_8:
      if (mode == VOIDmode)
        mode = TYPE_MODE (boolean_type_node);
      if (!target || !register_operand (target, mode))
        target = gen_reg_rtx (mode);
      target = expand_builtin_compare_and_swap (arglist, true, target);
      if (target)
        return target;
      break;

    case BUILT_IN_VAL_COMPARE_AND_SWAP_1: case BUILT_IN_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_VAL_COMPARE_AND_SWAP_4: case BUILT_IN_VAL_COMPARE_AND_SWAP_8:
      target = expand_builtin_compare_and_swap (arglist, false, target);
      if (target)
        return target;
      break;

    case BUILT_IN_LOCK_TEST_AND_SET_1: case BUILT_IN_LOCK_TEST_AND_SET_2:
    case BUILT_IN_LOCK_TEST_AND_SET_4: case BUILT_IN_LOCK_TEST_AND_SET_8:
      target = expand_builtin_lock_test_and_set (arglist, target);
      if (target)
        return target;
      break;

    case BUILT_IN_LOCK_RELEASE_1: case BUILT_IN_LOCK_RELEASE_2:
    case BUILT_IN_LOCK_RELEASE_4: case BUILT_IN_LOCK_RELEASE_8:
      expand_builtin_lock_release (arglist);
      return const0_rtx;

    case BUILT_IN_SYNCHRONIZE:
      expand_builtin_synchronize ();
      return const0_rtx;

    default:	/* just do library call, if unknown builtin */
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
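
/* Usage sketch (illustrative user code, not compiled as part of this file):
   the __sync_* cases above expand calls such as

     static int counter;
     void bump (void)
     {
       __sync_fetch_and_add (&counter, 1);
       while (!__sync_bool_compare_and_swap (&counter, 16, 0))
         continue;
     }

   directly to the target's atomic insn patterns when they exist; otherwise
   the fall-through to expand_call emits an ordinary library call.  */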

/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (tree t)
{
  tree fndecl, arglist, parmlist;
  tree argtype, parmtype;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  arglist = TREE_OPERAND (t, 1);
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
         the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
        {
          if (arglist)
            return END_BUILTINS;
          return DECL_FUNCTION_CODE (fndecl);
        }

      if (! arglist)
        return END_BUILTINS;

      argtype = TREE_TYPE (TREE_VALUE (arglist));

      if (SCALAR_FLOAT_TYPE_P (parmtype))
        {
          if (! SCALAR_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
        {
          if (! COMPLEX_FLOAT_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (POINTER_TYPE_P (parmtype))
        {
          if (! POINTER_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else if (INTEGRAL_TYPE_P (parmtype))
        {
          if (! INTEGRAL_TYPE_P (argtype))
            return END_BUILTINS;
        }
      else
        return END_BUILTINS;

      arglist = TREE_CHAIN (arglist);
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}

/* Fold a call to __builtin_constant_p, if we know it will evaluate to a
   constant.  ARGLIST is the argument list of the call.  */

static tree
fold_builtin_constant_p (tree arglist)
{
  if (arglist == 0)
    return NULL_TREE;

  arglist = TREE_VALUE (arglist);

  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arglist);

  /* If we know this is a constant, emit the constant of one.  */
  if (CONSTANT_CLASS_P (arglist)
      || (TREE_CODE (arglist) == CONSTRUCTOR
          && TREE_CONSTANT (arglist))
      || (TREE_CODE (arglist) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
    return integer_one_node;

  /* If this expression has side effects, show we don't know it to be a
     constant.  Likewise if it's a pointer or aggregate type since in
     those cases we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arglist)
      || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
      || POINTER_TYPE_P (TREE_TYPE (arglist))
      || cfun == 0)
    return integer_zero_node;

  return NULL_TREE;
}
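
/* Illustrative use of the fold above (hypothetical user code, not part of
   this file): callers can select a compile-time path once the argument has
   folded to a literal.

     #define LOG_FMT(x) \
       (__builtin_constant_p (x) ? "constant: %d\n" : "value: %d\n")

   After folding, the conditional itself becomes a constant and the dead
   branch is removed.  */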

/* Fold a call to __builtin_expect, if we expect that a comparison against
   the argument will fold to a constant.  In practice, this means a true
   constant or the address of a non-weak symbol.  ARGLIST is the argument
   list of the call.  */

static tree
fold_builtin_expect (tree arglist)
{
  tree arg, inner;

  if (arglist == 0)
    return NULL_TREE;

  arg = TREE_VALUE (arglist);

  /* If the argument isn't invariant, then there's nothing we can do.  */
  if (!TREE_INVARIANT (arg))
    return NULL_TREE;

  /* If we're looking at an address of a weak decl, then do not fold.  */
  inner = arg;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
        inner = TREE_OPERAND (inner, 0);
      while (TREE_CODE (inner) == COMPONENT_REF
             || TREE_CODE (inner) == ARRAY_REF);
      if (DECL_P (inner) && DECL_WEAK (inner))
        return NULL_TREE;
    }

  /* Otherwise, ARG already has the proper type for the return value.  */
  return arg;
}
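
/* Illustrative usage (hypothetical user macros, not part of this file):

     #define likely(x)   __builtin_expect ((x) != 0, 1)
     #define unlikely(x) __builtin_expect ((x) != 0, 0)

     if (unlikely (ptr == NULL))
       abort ();

   The fold above simply returns the invariant first argument, so the value
   of the condition is unchanged while the hint steers block layout.  */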

/* Fold a call to __builtin_classify_type.  */

static tree
fold_builtin_classify_type (tree arglist)
{
  if (arglist == 0)
    return build_int_cst (NULL_TREE, no_type_class);

  return build_int_cst (NULL_TREE,
                        type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
}

/* Fold a call to __builtin_strlen.  */

static tree
fold_builtin_strlen (tree arglist)
{
  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return NULL_TREE;
  else
    {
      tree len = c_strlen (TREE_VALUE (arglist), 0);

      if (len)
        {
          /* Convert from the internal "sizetype" type to "size_t".  */
          if (size_type_node)
            len = fold_convert (size_type_node, len);
          return len;
        }

      return NULL_TREE;
    }
}

/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (tree type, int warn)
{
  REAL_VALUE_TYPE real;

  /* __builtin_inff is intended to be usable to define INFINITY on all
     targets.  If an infinity is not available, INFINITY expands "to a
     positive constant of type float that overflows at translation
     time", footnote "In this case, using INFINITY will violate the
     constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
     Thus we pedwarn to ensure this constraint violation is
     diagnosed.  */
  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    pedwarn ("target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}

/* Fold a call to __builtin_nan or __builtin_nans.  */

static tree
fold_builtin_nan (tree arglist, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return NULL_TREE;

  str = c_getstr (TREE_VALUE (arglist));
  if (!str)
    return NULL_TREE;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return NULL_TREE;

  return build_real (type, real);
}

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0))
             && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 1))
             && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      if (! TREE_CONSTANT_OVERFLOW (t))
        {
          REAL_VALUE_TYPE c, cint;

          c = TREE_REAL_CST (t);
          real_trunc (&cint, TYPE_MODE (TREE_TYPE (t)), &c);
          return real_identical (&c, &cint);
        }
      break;

    case NOP_EXPR:
      {
        tree type = TREE_TYPE (TREE_OPERAND (t, 0));
        if (TREE_CODE (type) == INTEGER_TYPE)
          return true;
        if (TREE_CODE (type) == REAL_TYPE)
          return integer_valued_real_p (TREE_OPERAND (t, 0));
      }
      break;

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
        {
        case BUILT_IN_CEIL:
        case BUILT_IN_CEILF:
        case BUILT_IN_CEILL:
        case BUILT_IN_FLOOR:
        case BUILT_IN_FLOORF:
        case BUILT_IN_FLOORL:
        case BUILT_IN_NEARBYINT:
        case BUILT_IN_NEARBYINTF:
        case BUILT_IN_NEARBYINTL:
        case BUILT_IN_RINT:
        case BUILT_IN_RINTF:
        case BUILT_IN_RINTL:
        case BUILT_IN_ROUND:
        case BUILT_IN_ROUNDF:
        case BUILT_IN_ROUNDL:
        case BUILT_IN_TRUNC:
        case BUILT_IN_TRUNCF:
        case BUILT_IN_TRUNCL:
          return true;

        default:
          break;
        }
      break;

    default:
      break;
    }
  return false;
}

/* FNDECL is assumed to be a builtin call where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation.  */

static tree
fold_trunc_transparent_mathfn (tree fndecl, tree arglist)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg;

  if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  arg = TREE_VALUE (arglist);
  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        {
          arglist =
            build_tree_list (NULL_TREE, fold_convert (newtype, arg0));
          return fold_convert (ftype,
                               build_function_call_expr (decl, arglist));
        }
    }
  return NULL_TREE;
}

/* FNDECL is assumed to be a builtin call which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).  */

static tree
fold_fixed_mathfn (tree fndecl, tree arglist)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree arg;

  if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  arg = TREE_VALUE (arglist);

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold (build1 (FIX_TRUNC_EXPR, TREE_TYPE (TREE_TYPE (fndecl)), arg));

  if (optimize)
    {
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        {
          arglist =
            build_tree_list (NULL_TREE, fold_convert (newtype, arg0));
          return build_function_call_expr (decl, arglist);
        }
    }
  return NULL_TREE;
}

/* Fold function call to builtin cabs, cabsf or cabsl.  ARGLIST
   is the argument list and TYPE is the return type.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (tree arglist, tree type)
{
  tree arg;

  if (!arglist || TREE_CHAIN (arglist))
    return NULL_TREE;

  arg = TREE_VALUE (arglist);
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Evaluate cabs of a constant at compile-time.  */
  if (flag_unsafe_math_optimizations
      && TREE_CODE (arg) == COMPLEX_CST
      && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
      && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
      && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
      && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
    {
      REAL_VALUE_TYPE r, i;

      r = TREE_REAL_CST (TREE_REALPART (arg));
      i = TREE_REAL_CST (TREE_IMAGPART (arg));

      real_arithmetic (&r, MULT_EXPR, &r, &r);
      real_arithmetic (&i, MULT_EXPR, &i, &i);
      real_arithmetic (&r, PLUS_EXPR, &r, &i);
      if (real_sqrt (&r, TYPE_MODE (type), &r)
          || ! flag_trapping_math)
        return build_real (type, r);
    }

  /* If either part is zero, cabs is fabs of the other.  */
  if (TREE_CODE (arg) == COMPLEX_EXPR
      && real_zerop (TREE_OPERAND (arg, 0)))
    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
  if (TREE_CODE (arg) == COMPLEX_EXPR
      && real_zerop (TREE_OPERAND (arg, 1)))
    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && !optimize_size)
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
        {
          tree rpart, ipart, result, arglist;

          arg = builtin_save_expr (arg);

          rpart = fold (build1 (REALPART_EXPR, type, arg));
          ipart = fold (build1 (IMAGPART_EXPR, type, arg));

          rpart = builtin_save_expr (rpart);
          ipart = builtin_save_expr (ipart);

          result = fold (build2 (PLUS_EXPR, type,
                                 fold (build2 (MULT_EXPR, type,
                                               rpart, rpart)),
                                 fold (build2 (MULT_EXPR, type,
                                               ipart, ipart))));

          arglist = build_tree_list (NULL_TREE, result);
          return build_function_call_expr (sqrtfn, arglist);
        }
    }

  return NULL_TREE;
}
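
/* The rewrite above corresponds to the user-level identity below (a sketch,
   valid only under flag_unsafe_math_optimizations, ignoring the overflow
   protection a real cabs provides):

     #include <math.h>
     double cabs_by_hand (double re, double im)
     {
       return sqrt (re * re + im * im);
     }
*/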

/* Fold a builtin function call to sqrt, sqrtf, or sqrtl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sqrt (tree arglist, tree type)
{
  enum built_in_function fcode;
  tree arg = TREE_VALUE (arglist);

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize sqrt of constant value.  */
  if (TREE_CODE (arg) == REAL_CST
      && ! TREE_CONSTANT_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;

      x = TREE_REAL_CST (arg);
      if (real_sqrt (&r, TYPE_MODE (type), &x)
          || (!flag_trapping_math && !flag_errno_math))
        return build_real (type, r);
    }

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
      arg = fold (build2 (MULT_EXPR, type,
                          TREE_VALUE (TREE_OPERAND (arg, 1)),
                          build_real (type, dconsthalf)));
      arglist = build_tree_list (NULL_TREE, arg);
      return build_function_call_expr (expfn, arglist);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
        {
          tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
          tree tree_root;
          /* The inner root was either sqrt or cbrt.  */
          REAL_VALUE_TYPE dconstroot =
            BUILTIN_SQRT_P (fcode) ? dconsthalf : dconstthird;

          /* Adjust for the outer root.  */
          SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
          dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
          tree_root = build_real (type, dconstroot);
          arglist = tree_cons (NULL_TREE, arg0,
                               build_tree_list (NULL_TREE, tree_root));
          return build_function_call_expr (powfn, arglist);
        }
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
          || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
      tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
      tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
      tree narg1;
      if (!tree_expr_nonnegative_p (arg0))
        arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold (build2 (MULT_EXPR, type, arg1,
                            build_real (type, dconsthalf)));
      arglist = tree_cons (NULL_TREE, arg0,
                           build_tree_list (NULL_TREE, narg1));
      return build_function_call_expr (powfn, arglist);
    }

  return NULL_TREE;
}

/* Fold a builtin function call to cbrt, cbrtf, or cbrtl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (tree arglist, tree type)
{
  tree arg = TREE_VALUE (arglist);
  const enum built_in_function fcode = builtin_mathfn_code (arg);

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize cbrt of constant value.  */
  if (real_zerop (arg) || real_onep (arg) || real_minus_onep (arg))
    return arg;

  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          const REAL_VALUE_TYPE third_trunc =
            real_value_truncate (TYPE_MODE (type), dconstthird);
          arg = fold (build2 (MULT_EXPR, type,
                              TREE_VALUE (TREE_OPERAND (arg, 1)),
                              build_real (type, third_trunc)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (expfn, arglist);
        }

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree powfn = mathfn_built_in (type, BUILT_IN_POW);

          if (powfn)
            {
              tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
              tree tree_root;
              REAL_VALUE_TYPE dconstroot = dconstthird;

              SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
              dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
              tree_root = build_real (type, dconstroot);
              arglist = tree_cons (NULL_TREE, arg0,
                                   build_tree_list (NULL_TREE, tree_root));
              return build_function_call_expr (powfn, arglist);
            }
        }

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
          if (tree_expr_nonnegative_p (arg0))
            {
              tree powfn = mathfn_built_in (type, BUILT_IN_POW);

              if (powfn)
                {
                  tree tree_root;
                  REAL_VALUE_TYPE dconstroot;

                  real_arithmetic (&dconstroot, MULT_EXPR,
                                   &dconstthird, &dconstthird);
                  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
                  tree_root = build_real (type, dconstroot);
                  arglist = tree_cons (NULL_TREE, arg0,
                                       build_tree_list (NULL_TREE, tree_root));
                  return build_function_call_expr (powfn, arglist);
                }
            }
        }

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = TREE_VALUE (TREE_OPERAND (arg, 1));
          tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
          if (tree_expr_nonnegative_p (arg00))
            {
              tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconstthird);
              tree narg01 = fold (build2 (MULT_EXPR, type, arg01,
                                          build_real (type, dconstroot)));
              arglist = tree_cons (NULL_TREE, arg00,
                                   build_tree_list (NULL_TREE, narg01));
              return build_function_call_expr (powfn, arglist);
            }
        }
    }

  return NULL_TREE;
}

/* Fold function call to builtin sin, sinf, or sinl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sin (tree arglist)
{
  tree arg = TREE_VALUE (arglist);

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize sin (0.0) = 0.0.  */
  if (real_zerop (arg))
    return arg;

  return NULL_TREE;
}

/* Fold function call to builtin cos, cosf, or cosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cos (tree arglist, tree type, tree fndecl)
{
  tree arg = TREE_VALUE (arglist);

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize cos (0.0) = 1.0.  */
  if (real_zerop (arg))
    return build_real (type, dconst1);

  /* Optimize cos(-x) into cos (x).  */
  if (TREE_CODE (arg) == NEGATE_EXPR)
    {
      tree args = build_tree_list (NULL_TREE,
                                   TREE_OPERAND (arg, 0));
      return build_function_call_expr (fndecl, args);
    }

  return NULL_TREE;
}

/* Fold function call to builtin tan, tanf, or tanl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_tan (tree arglist)
{
  enum built_in_function fcode;
  tree arg = TREE_VALUE (arglist);

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize tan(0.0) = 0.0.  */
  if (real_zerop (arg))
    return arg;

  /* Optimize tan(atan(x)) = x.  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_ATAN
          || fcode == BUILT_IN_ATANF
          || fcode == BUILT_IN_ATANL))
    return TREE_VALUE (TREE_OPERAND (arg, 1));

  return NULL_TREE;
}

/* Fold function call to builtin atan, atanf, or atanl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_atan (tree arglist, tree type)
{
  tree arg = TREE_VALUE (arglist);

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize atan(0.0) = 0.0.  */
  if (real_zerop (arg))
    return arg;

  /* Optimize atan(1.0) = pi/4.  */
  if (real_onep (arg))
    {
      REAL_VALUE_TYPE cst;

      real_convert (&cst, TYPE_MODE (type), &dconstpi);
      SET_REAL_EXP (&cst, REAL_EXP (&cst) - 2);
      return build_real (type, cst);
    }

  return NULL_TREE;
}

/* Fold function call to builtin trunc, truncf or truncl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_trunc (tree fndecl, tree arglist)
{
  tree arg;

  if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize trunc of constant value.  */
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE r, x;
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      x = TREE_REAL_CST (arg);
      real_trunc (&r, TYPE_MODE (type), &x);
      return build_real (type, r);
    }

  return fold_trunc_transparent_mathfn (fndecl, arglist);
}

/* Fold function call to builtin floor, floorf or floorl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_floor (tree fndecl, tree arglist)
{
  tree arg;

  if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize floor of constant value.  */
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_floor (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (fndecl, arglist);
}

/* Fold function call to builtin ceil, ceilf or ceill.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_ceil (tree fndecl, tree arglist)
{
  tree arg;

  if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize ceil of constant value.  */
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_ceil (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (fndecl, arglist);
}

/* Fold function call to builtin round, roundf or roundl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_round (tree fndecl, tree arglist)
{
  tree arg;

  if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize round of constant value.  */
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE x;

      x = TREE_REAL_CST (arg);
      if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
        {
          tree type = TREE_TYPE (TREE_TYPE (fndecl));
          REAL_VALUE_TYPE r;

          real_round (&r, TYPE_MODE (type), &x);
          return build_real (type, r);
        }
    }

  return fold_trunc_transparent_mathfn (fndecl, arglist);
}

/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_int_roundingfn (tree fndecl, tree arglist)
{
  tree arg;

  if (! validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (arg) == REAL_CST && ! TREE_CONSTANT_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      if (! REAL_VALUE_ISNAN (x) && ! REAL_VALUE_ISINF (x))
        {
          tree itype = TREE_TYPE (TREE_TYPE (fndecl));
          tree ftype = TREE_TYPE (arg), result;
          HOST_WIDE_INT hi, lo;
          REAL_VALUE_TYPE r;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_LFLOOR:
            case BUILT_IN_LFLOORF:
            case BUILT_IN_LFLOORL:
            case BUILT_IN_LLFLOOR:
            case BUILT_IN_LLFLOORF:
            case BUILT_IN_LLFLOORL:
              real_floor (&r, TYPE_MODE (ftype), &x);
              break;

            case BUILT_IN_LCEIL:
            case BUILT_IN_LCEILF:
            case BUILT_IN_LCEILL:
            case BUILT_IN_LLCEIL:
            case BUILT_IN_LLCEILF:
            case BUILT_IN_LLCEILL:
              real_ceil (&r, TYPE_MODE (ftype), &x);
              break;

            case BUILT_IN_LROUND:
            case BUILT_IN_LROUNDF:
            case BUILT_IN_LROUNDL:
            case BUILT_IN_LLROUND:
            case BUILT_IN_LLROUNDF:
            case BUILT_IN_LLROUNDL:
              real_round (&r, TYPE_MODE (ftype), &x);
              break;

            default:
              gcc_unreachable ();
            }

          REAL_VALUE_TO_INT (&lo, &hi, r);
          result = build_int_cst_wide (NULL_TREE, lo, hi);
          if (int_fits_type_p (result, itype))
            return fold_convert (itype, result);
        }
    }

  return fold_fixed_mathfn (fndecl, arglist);
}

/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arglist)
{
  tree arg;

  if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (arg) == INTEGER_CST && ! TREE_CONSTANT_OVERFLOW (arg))
    {
      HOST_WIDE_INT hi, width, result;
      unsigned HOST_WIDE_INT lo;
      tree type;

      type = TREE_TYPE (arg);
      width = TYPE_PRECISION (type);
      lo = TREE_INT_CST_LOW (arg);

      /* Clear all the bits that are beyond the type's precision.  */
      if (width > HOST_BITS_PER_WIDE_INT)
        {
          hi = TREE_INT_CST_HIGH (arg);
          if (width < 2 * HOST_BITS_PER_WIDE_INT)
            hi &= ~((HOST_WIDE_INT) (-1) >> (width - HOST_BITS_PER_WIDE_INT));
        }
      else
        {
          hi = 0;
          if (width < HOST_BITS_PER_WIDE_INT)
            lo &= ~((unsigned HOST_WIDE_INT) (-1) << width);
        }

      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_FFS:
        case BUILT_IN_FFSL:
        case BUILT_IN_FFSLL:
          if (lo != 0)
            result = exact_log2 (lo & -lo) + 1;
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi) + 1;
          else
            result = 0;
          break;

        case BUILT_IN_CLZ:
        case BUILT_IN_CLZL:
        case BUILT_IN_CLZLL:
          if (hi != 0)
            result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
          else if (lo != 0)
            result = width - floor_log2 (lo) - 1;
          else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        case BUILT_IN_CTZ:
        case BUILT_IN_CTZL:
        case BUILT_IN_CTZLL:
          if (lo != 0)
            result = exact_log2 (lo & -lo);
          else if (hi != 0)
            result = HOST_BITS_PER_WIDE_INT + exact_log2 (hi & -hi);
          else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
            result = width;
          break;

        case BUILT_IN_POPCOUNT:
        case BUILT_IN_POPCOUNTL:
        case BUILT_IN_POPCOUNTLL:
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= hi - 1;
          break;

        case BUILT_IN_PARITY:
        case BUILT_IN_PARITYL:
        case BUILT_IN_PARITYLL:
          result = 0;
          while (lo)
            result++, lo &= lo - 1;
          while (hi)
            result++, hi &= hi - 1;
          result &= 1;
          break;

        default:
          gcc_unreachable ();
        }

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
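
/* The popcount/parity folding above uses the classic clear-lowest-set-bit
   loop.  A stand-alone sketch of the same computation (illustrative, not
   part of the compiler):

     unsigned popcount_slow (unsigned long x)
     {
       unsigned n = 0;
       while (x)
         {
           x &= x - 1;
           n++;
         }
       return n;
     }

   Parity is the same count reduced with "result &= 1".  */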

/* Return true if EXPR is the real constant contained in VALUE.  */

static bool
real_dconstp (tree expr, const REAL_VALUE_TYPE *value)
{
  STRIP_NOPS (expr);

  return ((TREE_CODE (expr) == REAL_CST
           && ! TREE_CONSTANT_OVERFLOW (expr)
           && REAL_VALUES_EQUAL (TREE_REAL_CST (expr), *value))
          || (TREE_CODE (expr) == COMPLEX_CST
              && real_dconstp (TREE_REALPART (expr), value)
              && real_zerop (TREE_IMAGPART (expr))));
}

/* A subroutine of fold_builtin to fold the various logarithmic
   functions.  FNDECL and ARGLIST are the declaration and argument list
   of a call to a builtin logN function.  VALUE is the base of the logN
   function.  */

static tree
fold_builtin_logarithm (tree fndecl, tree arglist,
                        const REAL_VALUE_TYPE *value)
{
  if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree arg = TREE_VALUE (arglist);
      const enum built_in_function fcode = builtin_mathfn_code (arg);

      /* Optimize logN(1.0) = 0.0.  */
      if (real_onep (arg))
        return build_real (type, dconst0);

      /* Optimize logN(N) = 1.0.  If N can't be truncated to MODE
         exactly, then only do this if flag_unsafe_math_optimizations.  */
      if (exact_real_truncate (TYPE_MODE (type), value)
          || flag_unsafe_math_optimizations)
        {
          const REAL_VALUE_TYPE value_truncate =
            real_value_truncate (TYPE_MODE (type), *value);
          if (real_dconstp (arg, &value_truncate))
            return build_real (type, dconst1);
        }

      /* Special case, optimize logN(expN(x)) = x.  */
      if (flag_unsafe_math_optimizations
          && ((value == &dconste
               && (fcode == BUILT_IN_EXP
                   || fcode == BUILT_IN_EXPF
                   || fcode == BUILT_IN_EXPL))
              || (value == &dconst2
                  && (fcode == BUILT_IN_EXP2
                      || fcode == BUILT_IN_EXP2F
                      || fcode == BUILT_IN_EXP2L))
              || (value == &dconst10 && (BUILTIN_EXP10_P (fcode)))))
        return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));

      /* Optimize logN(func()) for various exponential functions.  We
         want to determine the value "x" and the power "exponent" in
         order to transform logN(x**exponent) into exponent*logN(x).  */
      if (flag_unsafe_math_optimizations)
        {
          tree exponent = 0, x = 0;

          switch (fcode)
            {
            case BUILT_IN_EXP:
            case BUILT_IN_EXPF:
            case BUILT_IN_EXPL:
              /* Prepare to do logN(exp(exponent) -> exponent*logN(e).  */
              x = build_real (type,
                              real_value_truncate (TYPE_MODE (type), dconste));
              exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
              break;
            case BUILT_IN_EXP2:
            case BUILT_IN_EXP2F:
            case BUILT_IN_EXP2L:
              /* Prepare to do logN(exp2(exponent) -> exponent*logN(2).  */
              x = build_real (type, dconst2);
              exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
              break;
            case BUILT_IN_EXP10:
            case BUILT_IN_EXP10F:
            case BUILT_IN_EXP10L:
            case BUILT_IN_POW10:
            case BUILT_IN_POW10F:
            case BUILT_IN_POW10L:
              /* Prepare to do logN(exp10(exponent) -> exponent*logN(10).  */
              x = build_real (type, dconst10);
              exponent = TREE_VALUE (TREE_OPERAND (arg, 1));
              break;
            case BUILT_IN_SQRT:
            case BUILT_IN_SQRTF:
            case BUILT_IN_SQRTL:
              /* Prepare to do logN(sqrt(x) -> 0.5*logN(x).  */
              x = TREE_VALUE (TREE_OPERAND (arg, 1));
              exponent = build_real (type, dconsthalf);
              break;
            case BUILT_IN_CBRT:
            case BUILT_IN_CBRTF:
            case BUILT_IN_CBRTL:
              /* Prepare to do logN(cbrt(x) -> (1/3)*logN(x).  */
              x = TREE_VALUE (TREE_OPERAND (arg, 1));
              exponent = build_real (type,
                                     real_value_truncate (TYPE_MODE (type),
                                                          dconstthird));
              break;
            case BUILT_IN_POW:
            case BUILT_IN_POWF:
            case BUILT_IN_POWL:
              /* Prepare to do logN(pow(x,exponent) -> exponent*logN(x).  */
              x = TREE_VALUE (TREE_OPERAND (arg, 1));
              exponent = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
              break;
            default:
              break;
            }

          /* Now perform the optimization.  */
          if (x && exponent)
            {
              tree logfn;
              arglist = build_tree_list (NULL_TREE, x);
              logfn = build_function_call_expr (fndecl, arglist);
              return fold (build2 (MULT_EXPR, type, exponent, logfn));
            }
        }
    }

  return NULL_TREE;
}
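
/* For reference, the rewrites above implement the textbook identities
   (applied only under flag_unsafe_math_optimizations):

     logN (1.0)         ->  0.0
     logN (N)           ->  1.0
     logN (expN (x))    ->  x
     logN (sqrt (x))    ->  0.5 * logN (x)
     logN (cbrt (x))    ->  (1/3) * logN (x)
     logN (pow (x, y))  ->  y * logN (x)
*/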

/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_pow (tree fndecl, tree arglist, tree type)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));

  if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && ! TREE_CONSTANT_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
        return omit_one_operand (type, build_real (type, dconst1),
                                 arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
        return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
        return fold (build2 (RDIV_EXPR, type,
                             build_real (type, dconst1), arg0));

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
          && REAL_VALUES_EQUAL (c, dconsthalf))
        {
          tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

          if (sqrtfn != NULL_TREE)
            {
              tree arglist = build_tree_list (NULL_TREE, arg0);
              return build_function_call_expr (sqrtfn, arglist);
            }
        }

      /* Check for an integer exponent.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
      if (real_identical (&c, &cint))
        {
          /* Attempt to evaluate pow at compile-time.  */
          if (TREE_CODE (arg0) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (arg0))
            {
              REAL_VALUE_TYPE x;
              bool inexact;

              x = TREE_REAL_CST (arg0);
              inexact = real_powi (&x, TYPE_MODE (type), &x, n);
              if (flag_unsafe_math_optimizations || !inexact)
                return build_real (type, x);
            }

          /* Strip sign ops from even integer powers.  */
          if ((n & 1) == 0 && flag_unsafe_math_optimizations)
            {
              tree narg0 = fold_strip_sign_ops (arg0);
              if (narg0)
                {
                  arglist = build_tree_list (NULL_TREE, arg1);
                  arglist = tree_cons (NULL_TREE, narg0, arglist);
                  return build_function_call_expr (fndecl, arglist);
                }
            }
        }
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
        {
          tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
          tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
          arg = fold (build2 (MULT_EXPR, type, arg, arg1));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (expfn, arglist);
        }

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
        {
          tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
          tree narg1 = fold (build2 (MULT_EXPR, type, arg1,
                                     build_real (type, dconsthalf)));

          arglist = tree_cons (NULL_TREE, narg0,
                               build_tree_list (NULL_TREE, narg1));
          return build_function_call_expr (fndecl, arglist);
        }

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
        {
          tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
          if (tree_expr_nonnegative_p (arg))
            {
              const REAL_VALUE_TYPE dconstroot
                = real_value_truncate (TYPE_MODE (type), dconstthird);
              tree narg1 = fold (build2 (MULT_EXPR, type, arg1,
                                         build_real (type, dconstroot)));
              arglist = tree_cons (NULL_TREE, arg,
                                   build_tree_list (NULL_TREE, narg1));
              return build_function_call_expr (fndecl, arglist);
            }
        }

      /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
      if (fcode == BUILT_IN_POW || fcode == BUILT_IN_POWF
          || fcode == BUILT_IN_POWL)
        {
          tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
          tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
          tree narg1 = fold (build2 (MULT_EXPR, type, arg01, arg1));
          arglist = tree_cons (NULL_TREE, arg00,
                               build_tree_list (NULL_TREE, narg1));
          return build_function_call_expr (fndecl, arglist);
        }
    }

  return NULL_TREE;
}

/* Fold a builtin function call to powi, powif, or powil.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_powi (tree fndecl ATTRIBUTE_UNUSED, tree arglist, tree type)
{
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));

  if (!validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;

  /* Optimize pow(1.0,y) = 1.0.  */
  if (real_onep (arg0))
    return omit_one_operand (type, build_real (type, dconst1), arg1);

  if (host_integerp (arg1, 0))
    {
      HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);

      /* Evaluate powi at compile-time.  */
      if (TREE_CODE (arg0) == REAL_CST
          && ! TREE_CONSTANT_OVERFLOW (arg0))
        {
          REAL_VALUE_TYPE x;

          x = TREE_REAL_CST (arg0);
          real_powi (&x, TYPE_MODE (type), &x, c);
          return build_real (type, x);
        }

      /* Optimize pow(x,0) = 1.0.  */
      if (c == 0)
        return omit_one_operand (type, build_real (type, dconst1),
                                 arg0);

      /* Optimize pow(x,1) = x.  */
      if (c == 1)
        return arg0;

      /* Optimize pow(x,-1) = 1.0/x.  */
      if (c == -1)
        return fold (build2 (RDIV_EXPR, type,
                             build_real (type, dconst1), arg0));
    }

  return NULL_TREE;
}

/* A subroutine of fold_builtin to fold the various exponent
   functions.  FNDECL and ARGLIST are the declaration and argument list
   of a call to a builtin function.  VALUE is the value which will be
   raised to a power.  */

static tree
fold_builtin_exponent (tree fndecl, tree arglist,
                       const REAL_VALUE_TYPE *value)
{
  if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree arg = TREE_VALUE (arglist);

      /* Optimize exp*(0.0) = 1.0.  */
      if (real_zerop (arg))
        return build_real (type, dconst1);

      /* Optimize expN(1.0) = N.  */
      if (real_onep (arg))
        {
          REAL_VALUE_TYPE cst;

          real_convert (&cst, TYPE_MODE (type), value);
          return build_real (type, cst);
        }

      /* Attempt to evaluate expN(integer) at compile-time.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg) == REAL_CST
          && ! TREE_CONSTANT_OVERFLOW (arg))
        {
          REAL_VALUE_TYPE cint;
          REAL_VALUE_TYPE c;
          HOST_WIDE_INT n;

          c = TREE_REAL_CST (arg);
          n = real_to_integer (&c);
          real_from_integer (&cint, VOIDmode, n,
                             n < 0 ? -1 : 0, 0);
          if (real_identical (&c, &cint))
            {
              REAL_VALUE_TYPE x;

              real_powi (&x, TYPE_MODE (type), value, n);
              return build_real (type, x);
            }
        }

      /* Optimize expN(logN(x)) = x.  */
      if (flag_unsafe_math_optimizations)
        {
          const enum built_in_function fcode = builtin_mathfn_code (arg);

          if ((value == &dconste
               && (fcode == BUILT_IN_LOG
                   || fcode == BUILT_IN_LOGF
                   || fcode == BUILT_IN_LOGL))
              || (value == &dconst2
                  && (fcode == BUILT_IN_LOG2
                      || fcode == BUILT_IN_LOG2F
                      || fcode == BUILT_IN_LOG2L))
              || (value == &dconst10
                  && (fcode == BUILT_IN_LOG10
                      || fcode == BUILT_IN_LOG10F
                      || fcode == BUILT_IN_LOG10L)))
            return fold_convert (type, TREE_VALUE (TREE_OPERAND (arg, 1)));
        }
    }

  return NULL_TREE;
}

/* Fold function call to builtin memcpy.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcpy (tree fndecl, tree arglist)
{
  tree dest, src, len;

  if (!validate_arglist (arglist,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;

  dest = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));
  len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);

  return NULL_TREE;
}

/* Fold function call to builtin mempcpy.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_mempcpy (tree arglist, tree type, int endp)
{
  if (validate_arglist (arglist,
                        POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

      /* If the LEN parameter is zero, return DEST.  */
      if (integer_zerop (len))
        return omit_one_operand (type, dest, src);

      /* If SRC and DEST are the same (and not volatile), return DEST+LEN.  */
      if (operand_equal_p (src, dest, 0))
        {
          if (endp == 0 || endp == 3)
            return omit_one_operand (type, dest, len);

          if (endp == 2)
            len = fold (build2 (MINUS_EXPR, TREE_TYPE (len), len,
                                ssize_int (1)));

          len = fold_convert (TREE_TYPE (dest), len);
          len = fold (build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len));
          return fold_convert (type, len);
        }
    }

  return NULL_TREE;
}

/* Fold function call to builtin memmove.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memmove (tree arglist, tree type)
{
  tree dest, src, len;

  if (!validate_arglist (arglist,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;

  dest = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));
  len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (type, dest, src);

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return omit_one_operand (type, dest, len);

  return NULL_TREE;
}

/* Fold function call to builtin strcpy.  If LEN is not NULL, it represents
   the length of the string to be copied.  Return NULL_TREE if no
   simplification can be made.  */

static tree
fold_builtin_strcpy (tree fndecl, tree arglist, tree len)
{
  tree dest, src, fn;

  if (!validate_arglist (arglist,
                         POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_TREE;

  dest = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), dest);

  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;

  if (!len)
    {
      len = c_strlen (src, 1);
      if (! len || TREE_SIDE_EFFECTS (len))
        return NULL_TREE;
    }

  len = size_binop (PLUS_EXPR, len, ssize_int (1));
  arglist = build_tree_list (NULL_TREE, len);
  arglist = tree_cons (NULL_TREE, src, arglist);
  arglist = tree_cons (NULL_TREE, dest, arglist);
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
                       build_function_call_expr (fn, arglist));
}
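
/* Source-level picture of the transformation above (illustrative):

     strcpy (dst, "abc")    becomes    memcpy (dst, "abc", 4)

   i.e. the known string length plus one byte for the terminating NUL,
   which is why c_strlen's result gets ssize_int (1) added to it.  */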

/* Fold function call to builtin strncpy.  If SLEN is not NULL, it represents
   the length of the source string.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_strncpy (tree fndecl, tree arglist, tree slen)
{
  tree dest, src, len, fn;

  if (!validate_arglist (arglist,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;

  dest = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));
  len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, src);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (len == 0 || TREE_CODE (len) != INTEGER_CST)
    return NULL_TREE;

  if (!slen)
    slen = c_strlen (src, 1);

  /* Now, we must be passed a constant src ptr parameter.  */
  if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
    return NULL_TREE;

  slen = size_binop (PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return NULL_TREE;

  /* OK transform into builtin memcpy.  */
  fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
  if (!fn)
    return NULL_TREE;
  return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)),
                       build_function_call_expr (fn, arglist));
}

/* Fold function call to builtin memcmp.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (tree arglist)
{
  tree arg1, arg2, len;
  const char *p1, *p2;

  if (!validate_arglist (arglist,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));
  len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
                              arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (host_integerp (len, 1) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_low_cst (len, 1));

      if (r > 0)
        return integer_one_node;
      else if (r < 0)
        return integer_minus_one_node;
      else
        return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      tree ind1 = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg1)));
      tree ind2 = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg2)));
      return fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
    }

  return NULL_TREE;
}
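
/* The len == 1 case above is, at the source level (illustrative):

     memcmp (a, b, 1)    becomes    *(const unsigned char *) a
                                    - *(const unsigned char *) b

   which matches the C library's definition of ordering by unsigned chars.  */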

/* Fold function call to builtin strcmp.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strcmp (tree arglist)
{
  tree arg1, arg2;
  const char *p1, *p2;

  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_TREE;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return integer_zero_node;

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (p1 && p2)
    {
      const int i = strcmp (p1, p2);

      if (i < 0)
        return integer_minus_one_node;
      else if (i > 0)
        return integer_one_node;
      else
        return integer_zero_node;
    }

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      return fold_convert (integer_type_node,
                           build1 (INDIRECT_REF, cst_uchar_node,
                                   fold_convert (cst_uchar_ptr_node,
                                                 arg1)));
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0')
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      tree temp = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg2)));
      return fold (build1 (NEGATE_EXPR, integer_type_node, temp));
    }

  return NULL_TREE;
}

/* Fold function call to builtin strncmp.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_strncmp (tree arglist)
{
  tree arg1, arg2, len;
  const char *p1, *p2;

  if (!validate_arglist (arglist,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));
  len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    return omit_two_operands (integer_type_node, integer_zero_node,
                              arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand (integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  if (host_integerp (len, 1) && p1 && p2)
    {
      const int i = strncmp (p1, p2, tree_low_cst (len, 1));

      if (i > 0)
        return integer_one_node;
      else if (i < 0)
        return integer_minus_one_node;
      else
        return integer_zero_node;
    }

  /* If the second arg is "", and the length is greater than zero,
     return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      return fold_convert (integer_type_node,
                           build1 (INDIRECT_REF, cst_uchar_node,
                                   fold_convert (cst_uchar_ptr_node,
                                                 arg1)));
    }

  /* If the first arg is "", and the length is greater than zero,
     return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0'
      && TREE_CODE (len) == INTEGER_CST
      && tree_int_cst_sgn (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      tree temp = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg2)));
      return fold (build1 (NEGATE_EXPR, integer_type_node, temp));
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
      tree ind1 = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg1)));
      tree ind2 = fold_convert (integer_type_node,
                                build1 (INDIRECT_REF, cst_uchar_node,
                                        fold_convert (cst_uchar_ptr_node,
                                                      arg2)));
      return fold (build2 (MINUS_EXPR, integer_type_node, ind1, ind2));
    }

  return NULL_TREE;
}

/* Fold function call to builtin signbit, signbitf or signbitl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_signbit (tree fndecl, tree arglist)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  tree arg;

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  arg = TREE_VALUE (arglist);

  /* If ARG is a compile-time constant, determine the result.  */
  if (TREE_CODE (arg) == REAL_CST
      && !TREE_CONSTANT_OVERFLOW (arg))
    {
      REAL_VALUE_TYPE c;
      tree temp;

      c = TREE_REAL_CST (arg);
      temp = REAL_VALUE_NEGATIVE (c) ? integer_one_node : integer_zero_node;
      return fold_convert (type, temp);
    }

  /* If ARG is non-negative, the result is always zero.  */
  if (tree_expr_nonnegative_p (arg))
    return omit_one_operand (type, integer_zero_node, arg);

  /* If ARG's format doesn't have signed zeros, return "arg < 0.0".  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
    return fold (build2 (LT_EXPR, type, arg,
                         build_real (TREE_TYPE (arg), dconst0)));

  return NULL_TREE;
}
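
/* Sketch of the final rewrite above, assuming the format has no signed
   zeros:

     signbit (x)    becomes    x < 0.0

   With signed zeros (e.g. IEEE formats), signbit (-0.0) must be 1 while
   -0.0 < 0.0 is false, so the rewrite is suppressed there.  */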

/* Fold function call to builtin copysign, copysignf or copysignl.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_copysign (tree fndecl, tree arglist, tree type)
{
  tree arg1, arg2, tem;

  if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  arg1 = TREE_VALUE (arglist);
  arg2 = TREE_VALUE (TREE_CHAIN (arglist));

  /* copysign(X,X) is X.  */
  if (operand_equal_p (arg1, arg2, 0))
    return fold_convert (type, arg1);

  /* If ARG1 and ARG2 are compile-time constants, determine the result.  */
  if (TREE_CODE (arg1) == REAL_CST
      && TREE_CODE (arg2) == REAL_CST
      && !TREE_CONSTANT_OVERFLOW (arg1)
      && !TREE_CONSTANT_OVERFLOW (arg2))
    {
      REAL_VALUE_TYPE c1, c2;

      c1 = TREE_REAL_CST (arg1);
      c2 = TREE_REAL_CST (arg2);
      real_copysign (&c1, &c2);
      return build_real (type, c1);
    }

  /* copysign(X, Y) is fabs(X) when Y is always non-negative.
     Remember to evaluate Y for side-effects.  */
  if (tree_expr_nonnegative_p (arg2))
    return omit_one_operand (type,
                             fold (build1 (ABS_EXPR, type, arg1)),
                             arg2);

  /* Strip sign changing operations for the first argument.  */
  tem = fold_strip_sign_ops (arg1);
  if (tem)
    {
      arglist = tree_cons (NULL_TREE, tem, TREE_CHAIN (arglist));
      return build_function_call_expr (fndecl, arglist);
    }

  return NULL_TREE;
}

/* Fold a call to builtin isascii.  */

static tree
fold_builtin_isascii (tree arglist)
{
  if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
      tree arg = TREE_VALUE (arglist);

      arg = build2 (BIT_AND_EXPR, integer_type_node, arg,
                    build_int_cst (NULL_TREE,
                                   ~ (unsigned HOST_WIDE_INT) 0x7f));
      arg = fold (build2 (EQ_EXPR, integer_type_node,
                          arg, integer_zero_node));

      if (in_gimple_form && !TREE_CONSTANT (arg))
        return NULL_TREE;
      else
        return arg;
    }
}

/* Fold a call to builtin toascii.  */

static tree
fold_builtin_toascii (tree arglist)
{
  if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform toascii(c) -> (c & 0x7f).  */
      tree arg = TREE_VALUE (arglist);

      return fold (build2 (BIT_AND_EXPR, integer_type_node, arg,
                           build_int_cst (NULL_TREE, 0x7f)));
    }
}

/* Fold a call to builtin isdigit.  */

static tree
fold_builtin_isdigit (tree arglist)
{
  if (! validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
    return NULL_TREE;
  else
    {
      /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
      /* According to the C standard, isdigit is unaffected by locale.
         However, it definitely is affected by the target character set.  */
      tree arg;
      unsigned HOST_WIDE_INT target_digit0
        = lang_hooks.to_target_charset ('0');

      if (target_digit0 == 0)
        return NULL_TREE;

      arg = fold_convert (unsigned_type_node, TREE_VALUE (arglist));
      arg = build2 (MINUS_EXPR, unsigned_type_node, arg,
                    build_int_cst (unsigned_type_node, target_digit0));
      arg = build2 (LE_EXPR, integer_type_node, arg,
                    build_int_cst (unsigned_type_node, 9));
      arg = fold (arg);

      if (in_gimple_form && !TREE_CONSTANT (arg))
        return NULL_TREE;
      else
        return arg;
    }
}
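
/* User-level picture of the isdigit rewrite (illustrative; '0' here stands
   for the target character set's digit zero obtained via lang_hooks):

     isdigit (c)    becomes    (unsigned) c - '0' <= 9

   The unsigned subtraction wraps values below '0' to large numbers, so one
   comparison covers both bounds.  */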
/* Fold a call to fabs, fabsf or fabsl.  */

static tree
fold_builtin_fabs (tree arglist, tree type)
{
  tree arg;

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return 0;

  arg = TREE_VALUE (arglist);
  arg = fold_convert (type, arg);
  if (TREE_CODE (arg) == REAL_CST)
    return fold_abs_const (arg, type);
  return fold (build1 (ABS_EXPR, type, arg));
}

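/* Expository note (not from the original sources): fabs (-2.5) is evaluated
   at compile time here via fold_abs_const, yielding the constant 2.5, while
   fabs (x) for a non-constant x simply becomes the tree ABS_EXPR <x>.  */
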
/* Fold a call to abs, labs, llabs or imaxabs.  */

static tree
fold_builtin_abs (tree arglist, tree type)
{
  tree arg;

  if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
    return 0;

  arg = TREE_VALUE (arglist);
  arg = fold_convert (type, arg);
  if (TREE_CODE (arg) == INTEGER_CST)
    return fold_abs_const (arg, type);
  return fold (build1 (ABS_EXPR, type, arg));
}

/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   EXP is the CALL_EXPR for the call.  */

static tree
fold_builtin_classify (tree fndecl, tree arglist, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  tree arg;
  REAL_VALUE_TYPE r;

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    {
      /* Check that we have exactly one argument.  */
      if (arglist == 0)
        {
          error ("too few arguments to function %qs",
                 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
          return error_mark_node;
        }
      else if (TREE_CHAIN (arglist) != 0)
        {
          error ("too many arguments to function %qs",
                 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
          return error_mark_node;
        }
      else
        {
          error ("non-floating-point argument to function %qs",
                 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
          return error_mark_node;
        }
    }

  arg = TREE_VALUE (arglist);
  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      if (!MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          if (real_isinf (&r))
            return real_compare (GT_EXPR, &r, &dconst0)
                   ? integer_one_node : integer_minus_one_node;
          else
            return integer_zero_node;
        }

      return NULL_TREE;

    case BUILT_IN_FINITE:
      if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg)))
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          return real_isinf (&r) || real_isnan (&r)
                 ? integer_zero_node : integer_one_node;
        }

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg))))
        return omit_one_operand (type, integer_zero_node, arg);

      if (TREE_CODE (arg) == REAL_CST)
        {
          r = TREE_REAL_CST (arg);
          return real_isnan (&r) ? integer_one_node : integer_zero_node;
        }

      arg = builtin_save_expr (arg);
      return fold (build2 (UNORDERED_EXPR, type, arg, arg));

    default:
      gcc_unreachable ();
    }
}

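/* Expository note (not from the original sources): for a non-constant
   argument the isnan case above reduces to an unordered comparison of the
   argument with itself, roughly

     isnan (x)  becomes  __builtin_isunordered (x, x)

   which is true exactly when x is a NaN; builtin_save_expr guards against
   evaluating a side-effecting argument twice.  */
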
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARGLIST is the argument list for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (tree fndecl, tree arglist,
                            enum tree_code unordered_code,
                            enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree arg0, arg1;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    {
      /* Check that we have exactly two arguments.  */
      if (arglist == 0 || TREE_CHAIN (arglist) == 0)
        {
          error ("too few arguments to function %qs",
                 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
          return error_mark_node;
        }
      else if (TREE_CHAIN (TREE_CHAIN (arglist)) != 0)
        {
          error ("too many arguments to function %qs",
                 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
          return error_mark_node;
        }
    }

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;
  else
    {
      error ("non-floating-point argument to function %qs",
             IDENTIFIER_POINTER (DECL_NAME (fndecl)));
      return error_mark_node;
    }

  arg0 = fold_convert (cmp_type, arg0);
  arg1 = fold_convert (cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))))
        return omit_two_operands (type, integer_zero_node, arg0, arg1);
      return fold (build2 (UNORDERED_EXPR, type, arg0, arg1));
    }

  code = MODE_HAS_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
                                                      : ordered_code;
  return fold (build1 (TRUTH_NOT_EXPR, type,
                       fold (build2 (code, type, arg0, arg1))));
}

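/* Expository note (not from the original sources): the folds above implement
   the unordered comparisons by negating their complements, e.g.

     isgreater (x, y)  becomes  !(x UNLE y)  when the mode has NaNs
     isgreater (x, y)  becomes  !(x <= y)    when it does not

   so that no spurious exception is raised for unordered operands.  */
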
/* Fold a call to one of the external complex multiply libcalls.  */

static tree
fold_builtin_complex_mul (tree type, tree arglist)
{
  tree ar, ai, br, bi;

  if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, REAL_TYPE,
                         REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  ar = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
  ai = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
  br = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
  bi = TREE_VALUE (arglist);

  return fold_complex_mult_parts (type, ar, ai, br, bi);
}

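/* Expository note (not from the original sources): the four scalar arguments
   are the real and imaginary parts of the two operands of a complex multiply
   libcall, and fold_complex_mult_parts rebuilds the product from them,
   conceptually

     (ar + ai*i) * (br + bi*i)  =  (ar*br - ai*bi) + (ar*bi + ai*br)*i  */
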
/* Fold a call to one of the external complex division libcalls.  */

static tree
fold_builtin_complex_div (tree type, tree arglist)
{
  tree ar, ai, br, bi;

  if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, REAL_TYPE,
                         REAL_TYPE, VOID_TYPE))
    return NULL_TREE;

  ar = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
  ai = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
  br = TREE_VALUE (arglist); arglist = TREE_CHAIN (arglist);
  bi = TREE_VALUE (arglist);

  return fold_complex_div_parts (type, ar, ai, br, bi, RDIV_EXPR);
}

/* Used by constant folding to simplify calls to builtin functions.  EXP is
   the CALL_EXPR of a call to a builtin function.  IGNORE is true if the
   result of the function call is ignored.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_1 (tree fndecl, tree arglist, bool ignore)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.fold_builtin (fndecl, arglist, ignore);

  fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FPUTS:
      return fold_builtin_fputs (arglist, ignore, false, NULL_TREE);

    case BUILT_IN_FPUTS_UNLOCKED:
      return fold_builtin_fputs (arglist, ignore, true, NULL_TREE);

    case BUILT_IN_STRSTR:
      return fold_builtin_strstr (arglist, type);

    case BUILT_IN_STRCAT:
      return fold_builtin_strcat (arglist);

    case BUILT_IN_STRNCAT:
      return fold_builtin_strncat (arglist);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (arglist);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (arglist);

    case BUILT_IN_STRCHR:
    case BUILT_IN_INDEX:
      return fold_builtin_strchr (arglist, type);

    case BUILT_IN_STRRCHR:
    case BUILT_IN_RINDEX:
      return fold_builtin_strrchr (arglist, type);

    case BUILT_IN_STRCPY:
      return fold_builtin_strcpy (fndecl, arglist, NULL_TREE);

    case BUILT_IN_STRNCPY:
      return fold_builtin_strncpy (fndecl, arglist, NULL_TREE);

    case BUILT_IN_STRCMP:
      return fold_builtin_strcmp (arglist);

    case BUILT_IN_STRNCMP:
      return fold_builtin_strncmp (arglist);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (arglist, type);

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (arglist);

    case BUILT_IN_SPRINTF:
      return fold_builtin_sprintf (arglist, ignore);

    case BUILT_IN_CONSTANT_P:
      {
        tree val;

        val = fold_builtin_constant_p (arglist);
        /* Gimplification will pull the CALL_EXPR for the builtin out of
           an if condition.  When not optimizing, we'll not CSE it back.
           To avoid link error types of regressions, return false now.  */
        if (!val && !optimize)
          val = integer_zero_node;

        return val;
      }

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (arglist);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arglist);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (arglist);

    case BUILT_IN_FABS:
    case BUILT_IN_FABSF:
    case BUILT_IN_FABSL:
      return fold_builtin_fabs (arglist, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (arglist, type);

    case BUILT_IN_CONJ:
    case BUILT_IN_CONJF:
    case BUILT_IN_CONJL:
      if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
        return fold (build1 (CONJ_EXPR, type, TREE_VALUE (arglist)));
      break;

    case BUILT_IN_CREAL:
    case BUILT_IN_CREALF:
    case BUILT_IN_CREALL:
      if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
        return non_lvalue (fold (build1 (REALPART_EXPR, type,
                                         TREE_VALUE (arglist))));
      break;

    case BUILT_IN_CIMAG:
    case BUILT_IN_CIMAGF:
    case BUILT_IN_CIMAGL:
      if (validate_arglist (arglist, COMPLEX_TYPE, VOID_TYPE))
        return non_lvalue (fold (build1 (IMAGPART_EXPR, type,
                                         TREE_VALUE (arglist))));
      break;

    case BUILT_IN_CABS:
    case BUILT_IN_CABSF:
    case BUILT_IN_CABSL:
      return fold_builtin_cabs (arglist, type);

    case BUILT_IN_SQRT:
    case BUILT_IN_SQRTF:
    case BUILT_IN_SQRTL:
      return fold_builtin_sqrt (arglist, type);

    case BUILT_IN_CBRT:
    case BUILT_IN_CBRTF:
    case BUILT_IN_CBRTL:
      return fold_builtin_cbrt (arglist, type);

    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
      return fold_builtin_sin (arglist);

    case BUILT_IN_COS:
    case BUILT_IN_COSF:
    case BUILT_IN_COSL:
      return fold_builtin_cos (arglist, type, fndecl);

    case BUILT_IN_EXP:
    case BUILT_IN_EXPF:
    case BUILT_IN_EXPL:
      return fold_builtin_exponent (fndecl, arglist, &dconste);

    case BUILT_IN_EXP2:
    case BUILT_IN_EXP2F:
    case BUILT_IN_EXP2L:
      return fold_builtin_exponent (fndecl, arglist, &dconst2);

    case BUILT_IN_EXP10:
    case BUILT_IN_EXP10F:
    case BUILT_IN_EXP10L:
    case BUILT_IN_POW10:
    case BUILT_IN_POW10F:
    case BUILT_IN_POW10L:
      return fold_builtin_exponent (fndecl, arglist, &dconst10);

    case BUILT_IN_LOG:
    case BUILT_IN_LOGF:
    case BUILT_IN_LOGL:
      return fold_builtin_logarithm (fndecl, arglist, &dconste);

    case BUILT_IN_LOG2:
    case BUILT_IN_LOG2F:
    case BUILT_IN_LOG2L:
      return fold_builtin_logarithm (fndecl, arglist, &dconst2);

    case BUILT_IN_LOG10:
    case BUILT_IN_LOG10F:
    case BUILT_IN_LOG10L:
      return fold_builtin_logarithm (fndecl, arglist, &dconst10);

    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return fold_builtin_tan (arglist);

    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
      return fold_builtin_atan (arglist, type);

    case BUILT_IN_POW:
    case BUILT_IN_POWF:
    case BUILT_IN_POWL:
      return fold_builtin_pow (fndecl, arglist, type);

    case BUILT_IN_POWI:
    case BUILT_IN_POWIF:
    case BUILT_IN_POWIL:
      return fold_builtin_powi (fndecl, arglist, type);

    case BUILT_IN_INF:
    case BUILT_IN_INFF:
    case BUILT_IN_INFL:
      return fold_builtin_inf (type, true);

    case BUILT_IN_HUGE_VAL:
    case BUILT_IN_HUGE_VALF:
    case BUILT_IN_HUGE_VALL:
      return fold_builtin_inf (type, false);

    case BUILT_IN_NAN:
    case BUILT_IN_NANF:
    case BUILT_IN_NANL:
      return fold_builtin_nan (arglist, type, true);

    case BUILT_IN_NANS:
    case BUILT_IN_NANSF:
    case BUILT_IN_NANSL:
      return fold_builtin_nan (arglist, type, false);

    case BUILT_IN_FLOOR:
    case BUILT_IN_FLOORF:
    case BUILT_IN_FLOORL:
      return fold_builtin_floor (fndecl, arglist);

    case BUILT_IN_CEIL:
    case BUILT_IN_CEILF:
    case BUILT_IN_CEILL:
      return fold_builtin_ceil (fndecl, arglist);

    case BUILT_IN_TRUNC:
    case BUILT_IN_TRUNCF:
    case BUILT_IN_TRUNCL:
      return fold_builtin_trunc (fndecl, arglist);

    case BUILT_IN_ROUND:
    case BUILT_IN_ROUNDF:
    case BUILT_IN_ROUNDL:
      return fold_builtin_round (fndecl, arglist);

    case BUILT_IN_NEARBYINT:
    case BUILT_IN_NEARBYINTF:
    case BUILT_IN_NEARBYINTL:
    case BUILT_IN_RINT:
    case BUILT_IN_RINTF:
    case BUILT_IN_RINTL:
      return fold_trunc_transparent_mathfn (fndecl, arglist);

    case BUILT_IN_LCEIL:
    case BUILT_IN_LCEILF:
    case BUILT_IN_LCEILL:
    case BUILT_IN_LLCEIL:
    case BUILT_IN_LLCEILF:
    case BUILT_IN_LLCEILL:
    case BUILT_IN_LFLOOR:
    case BUILT_IN_LFLOORF:
    case BUILT_IN_LFLOORL:
    case BUILT_IN_LLFLOOR:
    case BUILT_IN_LLFLOORF:
    case BUILT_IN_LLFLOORL:
    case BUILT_IN_LROUND:
    case BUILT_IN_LROUNDF:
    case BUILT_IN_LROUNDL:
    case BUILT_IN_LLROUND:
    case BUILT_IN_LLROUNDF:
    case BUILT_IN_LLROUNDL:
      return fold_builtin_int_roundingfn (fndecl, arglist);

    case BUILT_IN_LRINT:
    case BUILT_IN_LRINTF:
    case BUILT_IN_LRINTL:
    case BUILT_IN_LLRINT:
    case BUILT_IN_LLRINTF:
    case BUILT_IN_LLRINTL:
      return fold_fixed_mathfn (fndecl, arglist);

    case BUILT_IN_FFS:
    case BUILT_IN_FFSL:
    case BUILT_IN_FFSLL:
    case BUILT_IN_CLZ:
    case BUILT_IN_CLZL:
    case BUILT_IN_CLZLL:
    case BUILT_IN_CTZ:
    case BUILT_IN_CTZL:
    case BUILT_IN_CTZLL:
    case BUILT_IN_POPCOUNT:
    case BUILT_IN_POPCOUNTL:
    case BUILT_IN_POPCOUNTLL:
    case BUILT_IN_PARITY:
    case BUILT_IN_PARITYL:
    case BUILT_IN_PARITYLL:
      return fold_builtin_bitop (fndecl, arglist);

    case BUILT_IN_MEMCPY:
      return fold_builtin_memcpy (fndecl, arglist);

    case BUILT_IN_MEMPCPY:
      return fold_builtin_mempcpy (arglist, type, /*endp=*/1);

    case BUILT_IN_MEMMOVE:
      return fold_builtin_memmove (arglist, type);

    case BUILT_IN_SIGNBIT:
    case BUILT_IN_SIGNBITF:
    case BUILT_IN_SIGNBITL:
      return fold_builtin_signbit (fndecl, arglist);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (arglist);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (arglist);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (arglist);

    case BUILT_IN_COPYSIGN:
    case BUILT_IN_COPYSIGNF:
    case BUILT_IN_COPYSIGNL:
      return fold_builtin_copysign (fndecl, arglist, type);

    case BUILT_IN_FINITE:
    case BUILT_IN_FINITEF:
    case BUILT_IN_FINITEL:
      return fold_builtin_classify (fndecl, arglist, BUILT_IN_FINITE);

    case BUILT_IN_ISINF:
    case BUILT_IN_ISINFF:
    case BUILT_IN_ISINFL:
      return fold_builtin_classify (fndecl, arglist, BUILT_IN_ISINF);

    case BUILT_IN_ISNAN:
    case BUILT_IN_ISNANF:
    case BUILT_IN_ISNANL:
      return fold_builtin_classify (fndecl, arglist, BUILT_IN_ISNAN);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (fndecl, arglist, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (fndecl, arglist, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (fndecl, arglist, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (fndecl, arglist, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (fndecl, arglist, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (fndecl, arglist, UNORDERED_EXPR,
                                         NOP_EXPR);

      /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    default:
      if (fcode >= BUILT_IN_COMPLEX_MUL_MIN
          && fcode <= BUILT_IN_COMPLEX_MUL_MAX)
        return fold_builtin_complex_mul (type, arglist);
      if (fcode >= BUILT_IN_COMPLEX_DIV_MIN
          && fcode <= BUILT_IN_COMPLEX_DIV_MAX)
        return fold_builtin_complex_div (type, arglist);
      break;
    }

  return 0;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_builtin (tree fndecl, tree arglist, bool ignore)
{
  tree exp = fold_builtin_1 (fndecl, arglist, ignore);
  if (exp)
    {
      /* ??? Don't clobber shared nodes such as integer_zero_node.  */
      if (CONSTANT_CLASS_P (exp))
        exp = build1 (NOP_EXPR, TREE_TYPE (exp), exp);
      TREE_NO_WARNING (exp) = 1;
    }

  return exp;
}

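/* Expository note (not from the original sources): the NOP_EXPR wrapper
   above matters because nodes like integer_zero_node are shared; setting
   TREE_NO_WARNING directly on a shared constant would silence warnings for
   every later use of that node, so a private copy is wrapped around it
   first.  */
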
/* Conveniently construct a function call expression.  */

tree
build_function_call_expr (tree fn, tree arglist)
{
  tree call_expr;

  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arglist, NULL_TREE);
  return fold (call_expr);
}

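/* Expository note (not from the original sources): a typical use, as seen in
   the string folders below, builds a call to another builtin, e.g.

     arglist = build_tree_list (NULL_TREE, s1);
     call = build_function_call_expr (implicit_built_in_decls[BUILT_IN_STRLEN],
                                      arglist);

   which yields a CALL_EXPR tree equivalent to strlen (s1).  */
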
/* This function validates the types of a function call argument list
   represented as a tree chain of parameters against a specified list
   of tree_codes.  If the last specifier is a 0, that represents an
   ellipsis, otherwise the last specifier must be a VOID_TYPE.  */

static int
validate_arglist (tree arglist, ...)
{
  enum tree_code code;
  int res = 0;
  va_list ap;

  va_start (ap, arglist);

  do
    {
      code = va_arg (ap, enum tree_code);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = 1;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = arglist == 0;
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          if (arglist == 0)
            goto end;
          if (code == POINTER_TYPE)
            {
              if (! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist))))
                goto end;
            }
          else if (code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
            goto end;
          break;
        }
      arglist = TREE_CHAIN (arglist);
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end:
  va_end (ap);

  return res;
}

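/* Expository note (not from the original sources): callers list the expected
   argument type codes and terminate with VOID_TYPE (exact arity) or 0
   (further arguments unchecked), e.g.

     validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer followed by an integer, as used by the strchr
   folder below.  */
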
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strstr builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strstr (tree arglist, tree type)
{
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      tree fn, tem;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return 0;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strstr (p1, p2);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
                              s1, build_int_cst (TREE_TYPE (s1), r - p1)));
          return fold_convert (type, tem);
        }

      if (p2[0] == '\0')
        return fold_convert (type, s1);

      if (p2[1] != '\0')
        return 0;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return 0;

      /* New argument list transforming strstr(s1, s2) to
         strchr(s1, s2[0]).  */
      arglist = build_tree_list (NULL_TREE,
                                 build_int_cst (NULL_TREE, p2[0]));
      arglist = tree_cons (NULL_TREE, s1, arglist);
      return build_function_call_expr (fn, arglist);
    }
}

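/* Expository note (not from the original sources): with both strings
   constant, e.g. strstr ("hello world", "world"), the result is folded to
   the constant offset "hello world" + 6; with only the second string
   constant and of length one, strstr (s, "w") is rewritten as
   strchr (s, 'w').  */
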
/* Simplify a call to the strchr builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strchr (tree arglist, tree type)
{
  if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
        return 0;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return 0;

          r = strchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
                              s1, build_int_cst (TREE_TYPE (s1), r - p1)));
          return fold_convert (type, tem);
        }
      return 0;
    }
}

/* Simplify a call to the strrchr builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (tree arglist, tree type)
{
  if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      const char *p1;
      tree fn;

      if (TREE_CODE (s2) != INTEGER_CST)
        return 0;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          char c;
          const char *r;
          tree tem;

          if (target_char_cast (s2, &c))
            return 0;

          r = strrchr (p1, c);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
                              s1, build_int_cst (TREE_TYPE (s1), r - p1)));
          return fold_convert (type, tem);
        }

      if (! integer_zerop (s2))
        return 0;

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return 0;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_function_call_expr (fn, arglist);
    }
}

/* Simplify a call to the strpbrk builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (tree arglist, tree type)
{
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      tree fn, tem;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return 0;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold (build2 (PLUS_EXPR, TREE_TYPE (s1),
                              s1, build_int_cst (TREE_TYPE (s1), r - p1)));
          return fold_convert (type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand (TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
        return 0;  /* Really call strpbrk.  */

      fn = implicit_built_in_decls[BUILT_IN_STRCHR];
      if (!fn)
        return 0;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      arglist = build_tree_list (NULL_TREE,
                                 build_int_cst (NULL_TREE, p2[0]));
      arglist = tree_cons (NULL_TREE, s1, arglist);
      return build_function_call_expr (fn, arglist);
    }
}

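/* Expository note (not from the original sources): strpbrk (s, "") folds to
   a constant null result (while still evaluating s for side effects), and a
   single-character set such as strpbrk (s, "/") is rewritten as
   strchr (s, '/').  */
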
/* Simplify a call to the strcat builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcat (tree arglist)
{
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree dst = TREE_VALUE (arglist),
        src = TREE_VALUE (TREE_CHAIN (arglist));
      const char *p = c_getstr (src);

      /* If the string length is zero, return the dst parameter.  */
      if (p && *p == '\0')
        return dst;

      return 0;
    }
}

/* Simplify a call to the strncat builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strncat (tree arglist)
{
  if (!validate_arglist (arglist,
                         POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree dst = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      const char *p = c_getstr (src);

      /* If the requested length is zero, or the src parameter string
         length is zero, return the dst parameter.  */
      if (integer_zerop (len) || (p && *p == '\0'))
        return omit_two_operands (TREE_TYPE (dst), dst, src, len);

      /* If the requested len is greater than or equal to the string
         length, call strcat.  */
      if (TREE_CODE (len) == INTEGER_CST && p
          && compare_tree_int (len, strlen (p)) >= 0)
        {
          tree newarglist
            = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
          tree fn = implicit_built_in_decls[BUILT_IN_STRCAT];

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return 0;

          return build_function_call_expr (fn, newarglist);
        }
      return 0;
    }
}

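/* Expository note (not from the original sources): strncat (dst, "ab", 0)
   folds to dst (still evaluating the other operands for side effects), and
   strncat (dst, "ab", 5), where the bound is at least the source length, is
   rewritten as strcat (dst, "ab").  */
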
/* Simplify a call to the strspn builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (tree arglist)
{
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strspn (p1, p2);
          return size_int (r);
        }

      /* If either argument is "", return 0.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands (integer_type_node, integer_zero_node,
                                  s1, s2);
      return 0;
    }
}

/* Simplify a call to the strcspn builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (tree arglist)
{
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If both arguments are constants, evaluate at compile-time.  */
      if (p1 && p2)
        {
          const size_t r = strcspn (p1, p2);
          return size_int (r);
        }

      /* If the first argument is "", return 0.  */
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand (integer_type_node,
                                   integer_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      if (p2 && *p2 == '\0')
        {
          tree newarglist = build_tree_list (NULL_TREE, s1),
            fn = implicit_built_in_decls[BUILT_IN_STRLEN];

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return 0;

          return build_function_call_expr (fn, newarglist);
        }
      return 0;
    }
}

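/* Expository note (not from the original sources): both span functions fold
   fully constant calls at compile time, e.g. strcspn ("abcde", "dx")
   becomes 3, strcspn ("", s) becomes 0, and strcspn (s, "") is rewritten as
   __builtin_strlen (s).  */
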
/* Fold a call to the fputs builtin.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is actually a
   call to fputs_unlocked.  If LEN is non-NULL, it represents the known
   length of the string.  Return NULL_TREE if no simplification was
   possible.  */

tree
fold_builtin_fputs (tree arglist, bool ignore, bool unlocked, tree len)
{
  tree fn;
  tree fn_fputc = unlocked ? implicit_built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FPUTC];
  tree fn_fwrite = unlocked ? implicit_built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
    : implicit_built_in_decls[BUILT_IN_FWRITE];

  /* If the return value is used, or the replacement _DECL isn't
     initialized, don't do the transformation.  */
  if (!ignore || !fn_fputc || !fn_fwrite)
    return 0;

  /* Verify the arguments in the original call.  */
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return 0;

  if (! len)
    len = c_strlen (TREE_VALUE (arglist), 0);

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return 0;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      return omit_one_operand (integer_type_node, integer_zero_node,
                               TREE_VALUE (TREE_CHAIN (arglist)));

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (TREE_VALUE (arglist));

        if (p != NULL)
          {
            /* New argument list transforming fputs(string, stream) to
               fputc(string[0], stream).  */
            arglist = build_tree_list (NULL_TREE,
                                       TREE_VALUE (TREE_CHAIN (arglist)));
            arglist = tree_cons (NULL_TREE,
                                 build_int_cst (NULL_TREE, p[0]),
                                 arglist);
            fn = fn_fputc;
            break;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        tree string_arg;

        /* If optimizing for size keep fputs.  */
        if (optimize_size)
          return 0;
        string_arg = TREE_VALUE (arglist);
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        arglist = build_tree_list (NULL_TREE,
                                   TREE_VALUE (TREE_CHAIN (arglist)));
        arglist = tree_cons (NULL_TREE, len, arglist);
        arglist = tree_cons (NULL_TREE, size_one_node, arglist);
        arglist = tree_cons (NULL_TREE, string_arg, arglist);
        fn = fn_fwrite;
        break;
      }
    default:
      gcc_unreachable ();
    }

  /* These optimizations are only performed when the result is ignored,
     hence there's no need to cast the result to integer_type_node.  */
  return build_function_call_expr (fn, arglist);
}

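/* Expository note (not from the original sources): when the result is
   unused, fputs ("", f) disappears (keeping the evaluation of f),
   fputs ("x", f) becomes fputc ('x', f), and fputs ("abc", f) becomes
   fwrite ("abc", 1, 3, f) unless we are optimizing for size.  */
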
/* Fold the next_arg builtin's arguments (ARGLIST).  Returns true if there
   was an error produced, false otherwise.  This is done so that we don't
   output the error or warning twice or three times.  */

bool
fold_builtin_next_arg (tree arglist)
{
  tree fntype = TREE_TYPE (current_function_decl);

  if (TYPE_ARG_TYPES (fntype) == 0
      || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
          == void_type_node))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }
  else if (!arglist)
    {
      /* Evidently an out of date version of <stdarg.h>; can't validate
         va_start's second argument, but can still work as intended.  */
      warning ("%<__builtin_next_arg%> called without an argument");
      return true;
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else if (!TREE_CHAIN (arglist)
           || !integer_zerop (TREE_VALUE (arglist))
           || !integer_zerop (TREE_VALUE (TREE_CHAIN (arglist)))
           || TREE_CHAIN (TREE_CHAIN (arglist)))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
      tree arg = TREE_VALUE (arglist);

      if (TREE_CHAIN (arglist))
        {
          error ("%<va_start%> used with too many arguments");
          return true;
        }

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (TREE_CODE (arg) == NOP_EXPR
             || TREE_CODE (arg) == CONVERT_EXPR
             || TREE_CODE (arg) == NON_LVALUE_EXPR
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes the tree optimizers hand us something other
             than the last argument even though the user used the last
             argument.  We just warn and set the arg to be the last
             argument so that we will get wrong-code because of it.  */
          warning ("second parameter of %<va_start%> not last named argument");
        }
      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      TREE_VALUE (arglist) = integer_zero_node;
      TREE_CHAIN (arglist) = build_tree_list (NULL, integer_zero_node);
    }
  return false;
}

/* Simplify a call to the sprintf builtin.

   Return 0 if no simplification was possible, otherwise return the
   simplified form of the call as a tree.  If IGNORED is true, it means that
   the caller does not use the returned value of the function.  */

static tree
fold_builtin_sprintf (tree arglist, int ignored)
{
  tree call, retval, dest, fmt;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      && !validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
                            VOID_TYPE))
    return NULL_TREE;

  /* Get the destination string and the format specifier.  */
  dest = TREE_VALUE (arglist);
  fmt = TREE_VALUE (TREE_CHAIN (arglist));

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return NULL_TREE;

  call = NULL_TREE;
  retval = NULL_TREE;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, '%') == NULL)
    {
      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
        return NULL_TREE;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      arglist = build_tree_list (NULL_TREE, fmt);
      arglist = tree_cons (NULL_TREE, dest, arglist);
      call = build_function_call_expr (fn, arglist);
      if (!ignored)
        retval = build_int_cst (NULL_TREE, strlen (fmt_str));
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, "%s") == 0)
    {
      tree fn, orig;
      fn = implicit_built_in_decls[BUILT_IN_STRCPY];

      if (!fn)
        return NULL_TREE;

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      orig = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      arglist = build_tree_list (NULL_TREE, orig);
      arglist = tree_cons (NULL_TREE, dest, arglist);
      if (!ignored)
        {
          retval = c_strlen (orig, 1);
          if (!retval || TREE_CODE (retval) != INTEGER_CST)
            return NULL_TREE;
        }
      call = build_function_call_expr (fn, arglist);
    }

  if (call && retval)
    {
      retval = fold_convert
        (TREE_TYPE (TREE_TYPE (implicit_built_in_decls[BUILT_IN_SPRINTF])),
         retval);
      return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
    }
  else
    return call;
}
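
/* Expository note (not from the original sources): sprintf (d, "hi") is
   rewritten as strcpy (d, "hi"), with the known return value 2 supplied via
   a COMPOUND_EXPR when the caller uses it, and sprintf (d, "%s", s) is
   rewritten as strcpy (d, s) whenever the length of s is known or the
   return value is ignored.  */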