/* Expand builtin functions.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "typeclass.h"
#include "langhooks.h"

#define CALLED_AS_BUILT_IN(NODE) \
  (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
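/* For example, a FUNCTION_DECL whose name is "__builtin_memcpy" satisfies
   CALLED_AS_BUILT_IN, while one named plain "memcpy" does not; the test is
   purely on the "__builtin_" name prefix, not on DECL_BUILT_IN_CLASS.  */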
/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
#ifndef PAD_VARARGS_DOWN
#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[4]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM) #X,
const char *const built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Set up an array of _DECL trees and make sure each element is
   initialized to NULL_TREE.  */
tree built_in_decls[(int) END_BUILTINS];

/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance the runtime is not
   required to implement the function call in all cases).  */
tree implicit_built_in_decls[(int) END_BUILTINS];
/* Trigonometric and mathematical constants used in builtin folding.  */
static bool builtin_dconsts_init = 0;
static REAL_VALUE_TYPE dconstpi;
static REAL_VALUE_TYPE dconste;
static int get_pointer_alignment (tree, unsigned int);
static tree c_strlen (tree);
static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static rtx expand_builtin_setjmp (tree, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_constant_p (tree, enum machine_mode);
static rtx expand_builtin_args_info (tree);
static rtx expand_builtin_next_arg (tree);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strcat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode, int);
static rtx expand_builtin_memmove (tree, rtx, enum machine_mode);
static rtx expand_builtin_bcopy (tree);
static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_strstr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strpbrk (tree, rtx, enum machine_mode);
static rtx expand_builtin_strchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_strrchr (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, rtx);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static rtx expand_builtin_fputs (tree, int, int);
static tree stabilize_va_list (tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_inf (tree, int);
static tree fold_builtin_nan (tree, tree, int);
static int validate_arglist (tree, ...);
static tree fold_trunc_transparent_mathfn (tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_cabs (tree, rtx);
static void init_builtin_dconsts (void);
static tree fold_builtin_cabs (tree, tree, tree);
/* Initialize mathematical constants for constant folding builtins.
   These constants need to be given to at least 160 bits precision.  */

static void
init_builtin_dconsts (void)
{
  real_from_string (&dconstpi,
    "3.1415926535897932384626433832795028841971693993751058209749445923078");
  real_from_string (&dconste,
    "2.7182818284590452353602874713526624977572470936999595749669676277241");

  builtin_dconsts_init = true;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (tree exp, unsigned int max_align)
{
  unsigned int align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;

	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
	    return align;

	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
		  & (max_align / BITS_PER_UNIT - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (DECL_P (exp))
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (tree src)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */
      return size_diffop (size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! host_integerp (offset_node, 0))
    offset = -1;
  else
    offset = tree_low_cst (offset_node, 0);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
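/* As an illustration of the cases above: for the constant "hello",
   c_strlen returns ssize_int (5); for "foo\0bar" with a non-constant
   offset it gives up because of the internal zero byte; and for
   "foo\0bar" with a known offset of 4 it returns ssize_int (3), the
   length measured from that offset.  */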
/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!host_integerp (offset_node, 1)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
}
/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  if (GET_MODE_CLASS (mode) != MODE_INT)
    abort ();

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      if (j > 2 * HOST_BITS_PER_WIDE_INT)
	abort ();
      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}
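/* For example, reading SImode from the string "abcd" packs the four
   bytes into a single CONST_INT: 0x61626364 on a big-endian target and
   0x64636261 on a little-endian one, matching how the target itself
   would read the word from memory.  */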
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (!host_integerp (cst, 1)
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = tree_low_cst (cst, 1);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count,
			    rtx tem)
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_rtx_MEM (Pmode, tem);
      set_mem_alias_set (tem, get_frame_alias_set ());
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
  set_mem_alias_set (tem, get_frame_alias_set ());
#endif
  return tem;
}
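/* In source terms, __builtin_return_address (0) and
   __builtin_frame_address (1) both funnel through this routine: COUNT
   says how many frames to walk back, and FNDECL_CODE selects whether
   the walked-to frame itself or the return address stored near it is
   wanted.  */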
/* Alias set used for setjmp buffer.  */
static HOST_WIDE_INT setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is used directly by sjlj exception
   handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save, mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (buf_addr) != Pmode)
    buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);

  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Tell optimize_save_area_alloca that extra work is going to
     need to go on during alloca.  */
  current_function_calls_setjmp = 1;

  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;
}
/* Construct the trailing part of a __builtin_setjmp call.
   This is used directly by sjlj exception handling code.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
	/* Nothing */
      }

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

static rtx
expand_builtin_setjmp (tree arglist, rtx target)
{
  rtx buf_addr, next_lab, cont_lab;

  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (TYPE_MODE (integer_type_node));

  buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);

  next_lab = gen_label_rtx ();
  cont_lab = gen_label_rtx ();

  expand_builtin_setjmp_setup (buf_addr, next_lab);

  /* Set TARGET to zero and branch to the continue label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (cont_lab));
  emit_label (next_lab);

  expand_builtin_setjmp_receiver (next_lab);

  /* Set TARGET to one.  */
  emit_move_insn (target, const1_rtx);
  emit_label (cont_lab);

  /* Tell flow about the strange goings on.  Putting `next_lab' on
     `nonlocal_goto_handler_labels' indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels
    = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);

  return target;
}
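/* A minimal use of the pair, as emitted by the exception handling code:

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       ...   normal path; may later call __builtin_longjmp (buf, 1)
     else
       ...   resumed here, with __builtin_setjmp returning 1

   Note that the second argument of __builtin_longjmp must be the
   constant 1 (see expand_builtin_longjmp below).  */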
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (buf_addr) != Pmode)
    buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

  current_function_calls_longjmp = 1;

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (insn == last)
	abort ();
      if (GET_CODE (insn) == JUMP_INSN)
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
					      REG_NOTES (insn));
	  break;
	}
      else if (GET_CODE (insn) == CALL_INSN)
	break;
    }
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree arglist)
{
  tree arg0, arg1, arg2;
  rtx op0, op1, op2;

  if (!validate_arglist (arglist, POINTER_TYPE, 0))
    return;

  arg0 = TREE_VALUE (arglist);
  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  if (TREE_CHAIN (arglist))
    {
      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
      if (TREE_CHAIN (TREE_CHAIN (arglist)))
	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
      else
	arg2 = build_int_2 (3, 0);
    }
  else
    {
      arg1 = integer_zero_node;
      arg2 = build_int_2 (3, 0);
    }

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second arg to `__builtin_prefetch' must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning ("invalid second arg to __builtin_prefetch; using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third arg to `__builtin_prefetch' must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning ("invalid third arg to __builtin_prefetch; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
	     (op0,
	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
	  || (GET_MODE (op0) != Pmode))
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (op0) != Pmode)
	    op0 = convert_memory_address (Pmode, op0);
#endif
	  op0 = force_reg (Pmode, op0);
	}
      emit_insn (gen_prefetch (op0, op1, op2));
    }
  else
#endif
    op0 = protect_from_queue (op0, 0);

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (GET_CODE (op0) != MEM && side_effects_p (op0))
    emit_insn (op0);
}
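/* A typical call is __builtin_prefetch (&a[i + 8], 0, 3): prefetch for
   reading (second argument 0) with maximal temporal locality (third
   argument 3).  Both optional arguments must be compile-time constants,
   as checked above.  */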
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movstrsi, ..).  */

static rtx
get_memory_rtx (tree exp)
{
  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
  rtx mem;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
     we can.  First remove any nops.  */
  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
	  || TREE_CODE (exp) == NON_LVALUE_EXPR)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      exp = TREE_OPERAND (exp, 0);
      set_mem_attributes (mem, exp, 0);
    }
  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
      /* memcpy, memset and other builtin stringops can alias with anything.  */
      set_mem_alias_set (mem, 0);
    }

  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int
apply_args_register_offset (int regno)
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO (regno);
#endif
  return apply_args_reg_offset[regno];
}
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && have_insn_for (SET, mode))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (adjust_address (registers, Pmode, 0),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insn_before (seq, NEXT_INSN (get_insns ()));
    pop_topmost_sequence ();
    return temp;
  }
}
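/* The three builtins below are normally used together, e.g. in a
   generic forwarding stub (illustrative only):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply (target_fn, args, size);
     __builtin_return (result);

   where SIZE is the number of bytes of stack arguments to copy; the
   register contents travel through the blocks built above.  */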
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, src, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (arguments) != Pmode)
    arguments = convert_memory_address (Pmode, arguments);
#endif

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
				       incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     pointer.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (GEN_CALL_VALUE (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (result) != Pmode)
    result = convert_memory_address (Pmode, result);
#endif

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:        return void_type_class;
    case INTEGER_TYPE:     return integer_type_class;
    case CHAR_TYPE:        return char_type_class;
    case ENUMERAL_TYPE:    return enumeral_type_class;
    case BOOLEAN_TYPE:     return boolean_type_class;
    case POINTER_TYPE:     return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:      return offset_type_class;
    case REAL_TYPE:        return real_type_class;
    case COMPLEX_TYPE:     return complex_type_class;
    case FUNCTION_TYPE:    return function_type_class;
    case METHOD_TYPE:      return method_type_class;
    case RECORD_TYPE:      return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:       return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case SET_TYPE:         return set_type_class;
    case FILE_TYPE:        return file_type_class;
    case LANG_TYPE:        return lang_type_class;
    default:               return no_type_class;
    }
}

/* Expand a call to __builtin_classify_type with arguments found in
   ARGLIST.  */

static rtx
expand_builtin_classify_type (tree arglist)
{
  if (arglist != 0)
    return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
  return GEN_INT (no_type_class);
}
/* Expand expression EXP, which is a call to __builtin_constant_p.  */

static rtx
expand_builtin_constant_p (tree arglist, enum machine_mode target_mode)
{
  rtx tmp;

  if (arglist == 0)
    return const0_rtx;
  arglist = TREE_VALUE (arglist);

  /* We have taken care of the easy cases during constant folding.  This
     case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE
     get a chance to see if it can deduce whether ARGLIST is constant.  */

  current_function_calls_constant_p = 1;

  tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
  tmp = gen_rtx_CONSTANT_P_RTX (target_mode, tmp);
  return tmp;
}
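/* For example, __builtin_constant_p (3) is folded to 1 long before we
   get here, while __builtin_constant_p (n) for a function argument N
   reaches this point as a CONSTANT_P_RTX and is resolved to 0 or 1 only
   once CSE has had a look at it.  */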
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  enum built_in_function fcode = NOT_BUILT_IN;
  if (TYPE_MODE (type) == TYPE_MODE (double_type_node))
    switch (fn)
      {
      case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
	fcode = BUILT_IN_SQRT;
	break;
      case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
	fcode = BUILT_IN_SIN;
	break;
      case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
	fcode = BUILT_IN_COS;
	break;
      case BUILT_IN_EXP: case BUILT_IN_EXPF: case BUILT_IN_EXPL:
	fcode = BUILT_IN_EXP;
	break;
      case BUILT_IN_LOG: case BUILT_IN_LOGF: case BUILT_IN_LOGL:
	fcode = BUILT_IN_LOG;
	break;
      case BUILT_IN_TAN: case BUILT_IN_TANF: case BUILT_IN_TANL:
	fcode = BUILT_IN_TAN;
	break;
      case BUILT_IN_ATAN: case BUILT_IN_ATANF: case BUILT_IN_ATANL:
	fcode = BUILT_IN_ATAN;
	break;
      case BUILT_IN_FLOOR: case BUILT_IN_FLOORF: case BUILT_IN_FLOORL:
	fcode = BUILT_IN_FLOOR;
	break;
      case BUILT_IN_CEIL: case BUILT_IN_CEILF: case BUILT_IN_CEILL:
	fcode = BUILT_IN_CEIL;
	break;
      case BUILT_IN_TRUNC: case BUILT_IN_TRUNCF: case BUILT_IN_TRUNCL:
	fcode = BUILT_IN_TRUNC;
	break;
      case BUILT_IN_ROUND: case BUILT_IN_ROUNDF: case BUILT_IN_ROUNDL:
	fcode = BUILT_IN_ROUND;
	break;
      case BUILT_IN_NEARBYINT: case BUILT_IN_NEARBYINTF: case BUILT_IN_NEARBYINTL:
	fcode = BUILT_IN_NEARBYINT;
	break;
      default:
	return NULL_TREE;
      }
  else if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
    switch (fn)
      {
      case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
	fcode = BUILT_IN_SQRTF;
	break;
      case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
	fcode = BUILT_IN_SINF;
	break;
      case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
	fcode = BUILT_IN_COSF;
	break;
      case BUILT_IN_EXP: case BUILT_IN_EXPF: case BUILT_IN_EXPL:
	fcode = BUILT_IN_EXPF;
	break;
      case BUILT_IN_LOG: case BUILT_IN_LOGF: case BUILT_IN_LOGL:
	fcode = BUILT_IN_LOGF;
	break;
      case BUILT_IN_TAN: case BUILT_IN_TANF: case BUILT_IN_TANL:
	fcode = BUILT_IN_TANF;
	break;
      case BUILT_IN_ATAN: case BUILT_IN_ATANF: case BUILT_IN_ATANL:
	fcode = BUILT_IN_ATANF;
	break;
      case BUILT_IN_FLOOR: case BUILT_IN_FLOORF: case BUILT_IN_FLOORL:
	fcode = BUILT_IN_FLOORF;
	break;
      case BUILT_IN_CEIL: case BUILT_IN_CEILF: case BUILT_IN_CEILL:
	fcode = BUILT_IN_CEILF;
	break;
      case BUILT_IN_TRUNC: case BUILT_IN_TRUNCF: case BUILT_IN_TRUNCL:
	fcode = BUILT_IN_TRUNCF;
	break;
      case BUILT_IN_ROUND: case BUILT_IN_ROUNDF: case BUILT_IN_ROUNDL:
	fcode = BUILT_IN_ROUNDF;
	break;
      case BUILT_IN_NEARBYINT: case BUILT_IN_NEARBYINTF: case BUILT_IN_NEARBYINTL:
	fcode = BUILT_IN_NEARBYINTF;
	break;
      default:
	return NULL_TREE;
      }
  else if (TYPE_MODE (type) == TYPE_MODE (long_double_type_node))
    switch (fn)
      {
      case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
	fcode = BUILT_IN_SQRTL;
	break;
      case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
	fcode = BUILT_IN_SINL;
	break;
      case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
	fcode = BUILT_IN_COSL;
	break;
      case BUILT_IN_EXP: case BUILT_IN_EXPF: case BUILT_IN_EXPL:
	fcode = BUILT_IN_EXPL;
	break;
      case BUILT_IN_LOG: case BUILT_IN_LOGF: case BUILT_IN_LOGL:
	fcode = BUILT_IN_LOGL;
	break;
      case BUILT_IN_TAN: case BUILT_IN_TANF: case BUILT_IN_TANL:
	fcode = BUILT_IN_TANL;
	break;
      case BUILT_IN_ATAN: case BUILT_IN_ATANF: case BUILT_IN_ATANL:
	fcode = BUILT_IN_ATANL;
	break;
      case BUILT_IN_FLOOR: case BUILT_IN_FLOORF: case BUILT_IN_FLOORL:
	fcode = BUILT_IN_FLOORL;
	break;
      case BUILT_IN_CEIL: case BUILT_IN_CEILF: case BUILT_IN_CEILL:
	fcode = BUILT_IN_CEILL;
	break;
      case BUILT_IN_TRUNC: case BUILT_IN_TRUNCF: case BUILT_IN_TRUNCL:
	fcode = BUILT_IN_TRUNCL;
	break;
      case BUILT_IN_ROUND: case BUILT_IN_ROUNDF: case BUILT_IN_ROUNDL:
	fcode = BUILT_IN_ROUNDL;
	break;
      case BUILT_IN_NEARBYINT: case BUILT_IN_NEARBYINTF: case BUILT_IN_NEARBYINTL:
	fcode = BUILT_IN_NEARBYINTL;
	break;
      default:
	return NULL_TREE;
      }
  else
    return NULL_TREE;

  return implicit_built_in_decls[fcode];
}
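/* For instance, mathfn_built_in (float_type_node, BUILT_IN_SQRT) yields
   implicit_built_in_decls[BUILT_IN_SQRTF], i.e. the sqrtf variant, and
   may be NULL_TREE if the runtime is not known to provide that function
   implicitly.  */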
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx lab;

  if (flag_errno_math && HONOR_NANS (GET_MODE (target)))
    {
      lab = gen_label_rtx ();

      /* Test the result; if it is NaN, set errno=EDOM because
	 the argument was not in the domain.  */
      emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			       0, lab);

#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	= gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));

      /* We can't set errno=EDOM directly; let the library call do it.
	 Pop the arguments right away in case the call gets deleted.  */
      expand_call (exp, target, 0);

      emit_label (lab);
    }
}
/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  enum machine_mode argmode;
  bool errno_set = false;

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return 0;

  /* Stabilize and compute the argument.  */
  if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
      && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
    {
      exp = copy_node (exp);
      TREE_OPERAND (exp, 1) = arglist;
      /* Wrap the computation of the argument in a SAVE_EXPR.  That
	 way, if we need to expand the argument again (as in the
	 flag_errno_math case below where we cannot directly set
	 errno), we will not perform side-effects more than once.
	 Note that here we're mutating the original EXP as well as the
	 copy; that's the right thing to do in case the original EXP
	 is expanded later.  */
      TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
      arglist = copy_node (arglist);
    }
  op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

  /* Make a suitable register to place result in.  */
  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  emit_queue ();
  start_sequence ();

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
      builtin_optab = sin_optab; break;
    case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
      builtin_optab = cos_optab; break;
    case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:
      errno_set = true; builtin_optab = sqrt_optab; break;
    case BUILT_IN_EXP: case BUILT_IN_EXPF: case BUILT_IN_EXPL:
      errno_set = true; builtin_optab = exp_optab; break;
    case BUILT_IN_LOG: case BUILT_IN_LOGF: case BUILT_IN_LOGL:
      errno_set = true; builtin_optab = log_optab; break;
    case BUILT_IN_TAN: case BUILT_IN_TANF: case BUILT_IN_TANL:
      builtin_optab = tan_optab; break;
    case BUILT_IN_ATAN: case BUILT_IN_ATANF: case BUILT_IN_ATANL:
      builtin_optab = atan_optab; break;
    case BUILT_IN_FLOOR: case BUILT_IN_FLOORF: case BUILT_IN_FLOORL:
      builtin_optab = floor_optab; break;
    case BUILT_IN_CEIL: case BUILT_IN_CEILF: case BUILT_IN_CEILL:
      builtin_optab = ceil_optab; break;
    case BUILT_IN_TRUNC: case BUILT_IN_TRUNCF: case BUILT_IN_TRUNCL:
      builtin_optab = trunc_optab; break;
    case BUILT_IN_ROUND: case BUILT_IN_ROUNDF: case BUILT_IN_ROUNDL:
      builtin_optab = round_optab; break;
    case BUILT_IN_NEARBYINT: case BUILT_IN_NEARBYINTF: case BUILT_IN_NEARBYINTL:
      builtin_optab = nearbyint_optab; break;
    default:
      abort ();
    }

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
  target = expand_unop (argmode, builtin_optab, op0, target, 0);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0, arg1;
  enum machine_mode argmode;
  bool errno_set = true;

  if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return 0;

  arg0 = TREE_VALUE (arglist);
  arg1 = TREE_VALUE (TREE_CHAIN (arglist));

  /* Stabilize the arguments.  */
  if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
    {
      arg0 = save_expr (arg0);
      TREE_VALUE (arglist) = arg0;
    }
  if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
    {
      arg1 = save_expr (arg1);
      TREE_VALUE (TREE_CHAIN (arglist)) = arg1;
    }

  exp = copy_node (exp);
  arglist = tree_cons (NULL_TREE, arg0,
		       build_tree_list (NULL_TREE, arg1));
  TREE_OPERAND (exp, 1) = arglist;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, 0, VOIDmode, 0);

  /* Make a suitable register to place result in.  */
  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  emit_queue ();
  start_sequence ();

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_POW: case BUILT_IN_POWF: case BUILT_IN_POWL:
      builtin_optab = pow_optab; break;
    case BUILT_IN_ATAN2: case BUILT_IN_ATAN2F: case BUILT_IN_ATAN2L:
      builtin_optab = atan2_optab; break;
    default:
      abort ();
    }

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  argmode = TYPE_MODE (TREE_TYPE (arg0));
  target = expand_binop (argmode, builtin_optab, op0, op1,
			 target, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  if (errno_set)
    expand_errno_check (exp, target);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return target;
}
/* Expand expression EXP which is a call to the strlen builtin.  Return 0
   if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree arglist, rtx target,
		       enum machine_mode target_mode)
{
  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return 0;
  else
    {
      rtx pat;
      tree len, src = TREE_VALUE (arglist);
      rtx result, src_reg, char_rtx, before_strlen;
      enum machine_mode insn_mode = target_mode, char_mode;
      enum insn_code icode = CODE_FOR_nothing;
      int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return 0;

      /* Bail out if we can't compute strlen in the right mode.  */
      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return 0;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      char_rtx = const0_rtx;
      char_mode = insn_data[(int) icode].operand[2].mode;
      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
							     char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
			     char_rtx, GEN_INT (align));
      if (! pat)
	return 0;
      emit_insn (pat);

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = memory_address (BLKmode,
			    expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
      if (pat != src_reg)
	emit_move_insn (src_reg, pat);
      pat = get_insns ();
      end_sequence ();

      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == target_mode)
	target = result;
      else if (target != 0)
	convert_move (target, result, 0);
      else
	target = convert_to_mode (target_mode, result, 0);

      return target;
    }
}
2006 /* Expand a call to the strstr builtin. Return 0 if we failed the
2007 caller should emit a normal call, otherwise try to get the result
2008 in TARGET, if convenient (and in mode MODE if that's convenient). */
2011 expand_builtin_strstr (tree arglist
, rtx target
, enum machine_mode mode
)
2013 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2017 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
2019 const char *p1
, *p2
;
2028 const char *r
= strstr (p1
, p2
);
2033 /* Return an offset into the constant string argument. */
2034 return expand_expr (fold (build (PLUS_EXPR
, TREE_TYPE (s1
),
2035 s1
, ssize_int (r
- p1
))),
2036 target
, mode
, EXPAND_NORMAL
);
2040 return expand_expr (s1
, target
, mode
, EXPAND_NORMAL
);
2045 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
2049 /* New argument list transforming strstr(s1, s2) to
2050 strchr(s1, s2[0]). */
2052 build_tree_list (NULL_TREE
, build_int_2 (p2
[0], 0));
2053 arglist
= tree_cons (NULL_TREE
, s1
, arglist
);
2054 return expand_expr (build_function_call_expr (fn
, arglist
),
2055 target
, mode
, EXPAND_NORMAL
);
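/* Illustrative sketch (hypothetical variable name `path'): the rewrite
   built just above maps, for a single-character constant second argument,

       strstr (path, "/")   into   strchr (path, '/')

   since searching for a one-character string is the same as searching
   for that one character, and strchr is the cheaper call.  */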
2059 /* Expand a call to the strchr builtin. Return 0 if we failed; the
2060 caller should emit a normal call, otherwise try to get the result
2061 in TARGET, if convenient (and in mode MODE if that's convenient). */
2064 expand_builtin_strchr (tree arglist
, rtx target
, enum machine_mode mode
)
2066 if (!validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2070 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
2073 if (TREE_CODE (s2
) != INTEGER_CST
)
2082 if (target_char_cast (s2
, &c
))
2090 /* Return an offset into the constant string argument. */
2091 return expand_expr (fold (build (PLUS_EXPR
, TREE_TYPE (s1
),
2092 s1
, ssize_int (r
- p1
))),
2093 target
, mode
, EXPAND_NORMAL
);
2096 /* FIXME: Should use here strchrM optab so that ports can optimize
2102 /* Expand a call to the strrchr builtin. Return 0 if we failed; the
2103 caller should emit a normal call, otherwise try to get the result
2104 in TARGET, if convenient (and in mode MODE if that's convenient). */
2107 expand_builtin_strrchr (tree arglist
, rtx target
, enum machine_mode mode
)
2109 if (!validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2113 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
2117 if (TREE_CODE (s2
) != INTEGER_CST
)
2126 if (target_char_cast (s2
, &c
))
2129 r
= strrchr (p1
, c
);
2134 /* Return an offset into the constant string argument. */
2135 return expand_expr (fold (build (PLUS_EXPR
, TREE_TYPE (s1
),
2136 s1
, ssize_int (r
- p1
))),
2137 target
, mode
, EXPAND_NORMAL
);
2140 if (! integer_zerop (s2
))
2143 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
2147 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
2148 return expand_expr (build_function_call_expr (fn
, arglist
),
2149 target
, mode
, EXPAND_NORMAL
);
2153 /* Expand a call to the strpbrk builtin. Return 0 if we failed; the
2154 caller should emit a normal call, otherwise try to get the result
2155 in TARGET, if convenient (and in mode MODE if that's convenient). */
2158 expand_builtin_strpbrk (tree arglist
, rtx target
, enum machine_mode mode
)
2160 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2164 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
2166 const char *p1
, *p2
;
2175 const char *r
= strpbrk (p1
, p2
);
2180 /* Return an offset into the constant string argument. */
2181 return expand_expr (fold (build (PLUS_EXPR
, TREE_TYPE (s1
),
2182 s1
, ssize_int (r
- p1
))),
2183 target
, mode
, EXPAND_NORMAL
);
2188 /* strpbrk(x, "") == NULL.
2189 Evaluate and ignore the arguments in case they had side-effects. */
2191 expand_expr (s1
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2196 return 0; /* Really call strpbrk. */
2198 fn
= implicit_built_in_decls
[BUILT_IN_STRCHR
];
2202 /* New argument list transforming strpbrk(s1, s2) to
2203 strchr(s1, s2[0]). */
2205 build_tree_list (NULL_TREE
, build_int_2 (p2
[0], 0));
2206 arglist
= tree_cons (NULL_TREE
, s1
, arglist
);
2207 return expand_expr (build_function_call_expr (fn
, arglist
),
2208 target
, mode
, EXPAND_NORMAL
);
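/* Illustrative sketch (hypothetical variable name `line'): when the accept
   set is a single-character constant string, the rewrite built above maps

       strpbrk (line, ",")   into   strchr (line, ',')

   because the first occurrence of any character from a one-element set is
   simply the first occurrence of that character.  */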
2212 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2213 bytes from constant string DATA + OFFSET and return it as a target constant. */
2217 builtin_memcpy_read_str (void *data
, HOST_WIDE_INT offset
,
2218 enum machine_mode mode
)
2220 const char *str
= (const char *) data
;
2223 || ((unsigned HOST_WIDE_INT
) offset
+ GET_MODE_SIZE (mode
)
2224 > strlen (str
) + 1))
2225 abort (); /* Attempt to read past the end of constant string. */
2227 return c_readstr (str
+ offset
, mode
);
2230 /* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2231 Return 0 if we failed; the caller should emit a normal call,
2232 otherwise try to get the result in TARGET, if convenient (and in
2233 mode MODE if that's convenient). */
2235 expand_builtin_memcpy (tree arglist
, rtx target
, enum machine_mode mode
)
2237 if (!validate_arglist (arglist
,
2238 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2242 tree dest
= TREE_VALUE (arglist
);
2243 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2244 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2245 const char *src_str
;
2246 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2247 unsigned int dest_align
2248 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2249 rtx dest_mem
, src_mem
, dest_addr
, len_rtx
;
2251 /* If DEST is not a pointer type, call the normal function. */
2252 if (dest_align
== 0)
2255 /* If the LEN parameter is zero, return DEST. */
2256 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 0)
2258 /* Evaluate and ignore SRC in case it has side-effects. */
2259 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2260 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
2263 /* If SRC is not a pointer type, don't do this
2264 operation in-line. */
2268 dest_mem
= get_memory_rtx (dest
);
2269 set_mem_align (dest_mem
, dest_align
);
2270 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
2271 src_str
= c_getstr (src
);
2273 /* If SRC is a string constant and block move would be done
2274 by pieces, we can avoid loading the string from memory
2275 and only store the computed constants. */
2277 && GET_CODE (len_rtx
) == CONST_INT
2278 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
2279 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
2280 (void *) src_str
, dest_align
))
2282 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
2283 builtin_memcpy_read_str
,
2284 (void *) src_str
, dest_align
, 0);
2285 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2286 #ifdef POINTERS_EXTEND_UNSIGNED
2287 if (GET_MODE (dest_mem
) != ptr_mode
)
2288 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2293 src_mem
= get_memory_rtx (src
);
2294 set_mem_align (src_mem
, src_align
);
2296 /* Copy word part most expediently. */
2297 dest_addr
= emit_block_move (dest_mem
, src_mem
, len_rtx
,
2302 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2303 #ifdef POINTERS_EXTEND_UNSIGNED
2304 if (GET_MODE (dest_addr
) != ptr_mode
)
2305 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
2312 /* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2313 Return 0 if we failed; the caller should emit a normal call,
2314 otherwise try to get the result in TARGET, if convenient (and in
2315 mode MODE if that's convenient). If ENDP is 0 return the
2316 destination pointer, if ENDP is 1 return the end pointer ala
2317 mempcpy, and if ENDP is 2 return the end pointer minus one ala stpcpy. */
2321 expand_builtin_mempcpy (tree arglist
, rtx target
, enum machine_mode mode
,
2324 if (!validate_arglist (arglist
,
2325 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2327 /* If return value is ignored, transform mempcpy into memcpy. */
2328 else if (target
== const0_rtx
)
2330 tree fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2335 return expand_expr (build_function_call_expr (fn
, arglist
),
2336 target
, mode
, EXPAND_NORMAL
);
2340 tree dest
= TREE_VALUE (arglist
);
2341 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2342 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2343 const char *src_str
;
2344 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2345 unsigned int dest_align
2346 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2347 rtx dest_mem
, src_mem
, len_rtx
;
2349 /* If DEST is not a pointer type or LEN is not constant,
2350 call the normal function. */
2351 if (dest_align
== 0 || !host_integerp (len
, 1))
2354 /* If the LEN parameter is zero, return DEST. */
2355 if (tree_low_cst (len
, 1) == 0)
2357 /* Evaluate and ignore SRC in case it has side-effects. */
2358 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2359 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
2362 /* If SRC is not a pointer type, don't do this
2363 operation in-line. */
2367 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
2368 src_str
= c_getstr (src
);
2370 /* If SRC is a string constant and block move would be done
2371 by pieces, we can avoid loading the string from memory
2372 and only store the computed constants. */
2374 && GET_CODE (len_rtx
) == CONST_INT
2375 && (unsigned HOST_WIDE_INT
) INTVAL (len_rtx
) <= strlen (src_str
) + 1
2376 && can_store_by_pieces (INTVAL (len_rtx
), builtin_memcpy_read_str
,
2377 (void *) src_str
, dest_align
))
2379 dest_mem
= get_memory_rtx (dest
);
2380 set_mem_align (dest_mem
, dest_align
);
2381 dest_mem
= store_by_pieces (dest_mem
, INTVAL (len_rtx
),
2382 builtin_memcpy_read_str
,
2383 (void *) src_str
, dest_align
, endp
);
2384 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2385 #ifdef POINTERS_EXTEND_UNSIGNED
2386 if (GET_MODE (dest_mem
) != ptr_mode
)
2387 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2392 if (GET_CODE (len_rtx
) == CONST_INT
2393 && can_move_by_pieces (INTVAL (len_rtx
),
2394 MIN (dest_align
, src_align
)))
2396 dest_mem
= get_memory_rtx (dest
);
2397 set_mem_align (dest_mem
, dest_align
);
2398 src_mem
= get_memory_rtx (src
);
2399 set_mem_align (src_mem
, src_align
);
2400 dest_mem
= move_by_pieces (dest_mem
, src_mem
, INTVAL (len_rtx
),
2401 MIN (dest_align
, src_align
), endp
);
2402 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2403 #ifdef POINTERS_EXTEND_UNSIGNED
2404 if (GET_MODE (dest_mem
) != ptr_mode
)
2405 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2414 /* Expand expression EXP, which is a call to the memmove builtin. Return 0
2415 if we failed; the caller should emit a normal call. */
2418 expand_builtin_memmove (tree arglist
, rtx target
, enum machine_mode mode
)
2420 if (!validate_arglist (arglist
,
2421 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2425 tree dest
= TREE_VALUE (arglist
);
2426 tree src
= TREE_VALUE (TREE_CHAIN (arglist
));
2427 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2429 unsigned int src_align
= get_pointer_alignment (src
, BIGGEST_ALIGNMENT
);
2430 unsigned int dest_align
2431 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2433 /* If DEST is not a pointer type, call the normal function. */
2434 if (dest_align
== 0)
2437 /* If the LEN parameter is zero, return DEST. */
2438 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 0)
2440 /* Evaluate and ignore SRC in case it has side-effects. */
2441 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2442 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
2445 /* If SRC is not a pointer type, don't do this
2446 operation in-line. */
2450 /* If src is categorized for a readonly section we can use
2452 if (readonly_data_expr (src
))
2454 tree
const fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2457 return expand_expr (build_function_call_expr (fn
, arglist
),
2458 target
, mode
, EXPAND_NORMAL
);
2461 /* Otherwise, call the normal function. */
2466 /* Expand expression EXP, which is a call to the bcopy builtin. Return 0
2467 if we failed; the caller should emit a normal call. */
2470 expand_builtin_bcopy (tree arglist
)
2472 tree src
, dest
, size
, newarglist
;
2474 if (!validate_arglist (arglist
,
2475 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2478 src
= TREE_VALUE (arglist
);
2479 dest
= TREE_VALUE (TREE_CHAIN (arglist
));
2480 size
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2482 /* New argument list transforming bcopy(ptr x, ptr y, int z) to
2483 memmove(ptr y, ptr x, size_t z). This is done this way
2484 so that if it isn't expanded inline, we fall back to
2485 calling bcopy instead of memmove. */
2487 newarglist
= build_tree_list (NULL_TREE
, convert (sizetype
, size
));
2488 newarglist
= tree_cons (NULL_TREE
, src
, newarglist
);
2489 newarglist
= tree_cons (NULL_TREE
, dest
, newarglist
);
2491 return expand_builtin_memmove (newarglist
, const0_rtx
, VOIDmode
);
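/* Illustrative sketch (hypothetical names `tmp', `out', `len'): the
   argument shuffle above only reflects the two interfaces' parameter
   orders,

       bcopy (tmp, out, len)   behaves like   memmove (out, tmp, len)

   i.e. bcopy takes (source, destination, count) while memmove takes
   (destination, source, count).  */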
2494 /* Expand expression EXP, which is a call to the strcpy builtin. Return 0
2495 if we failed; the caller should emit a normal call, otherwise try to get
2496 the result in TARGET, if convenient (and in mode MODE if that's convenient). */
2500 expand_builtin_strcpy (tree arglist
, rtx target
, enum machine_mode mode
)
2504 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2507 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2511 len
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)));
2515 len
= size_binop (PLUS_EXPR
, len
, ssize_int (1));
2516 chainon (arglist
, build_tree_list (NULL_TREE
, len
));
2517 return expand_expr (build_function_call_expr (fn
, arglist
),
2518 target
, mode
, EXPAND_NORMAL
);
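/* Illustrative sketch (hypothetical destination `d'): when the source
   length is known at compile time, strcpy copies exactly strlen (src) + 1
   bytes including the terminating NUL, so the rewrite above amounts to

       strcpy (d, "abc")   being expanded as   memcpy (d, "abc", 4);  */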
2521 /* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
2522 Return 0 if we failed; the caller should emit a normal call,
2523 otherwise try to get the result in TARGET, if convenient (and in
2524 mode MODE if that's convenient). */
2527 expand_builtin_stpcpy (tree arglist
, rtx target
, enum machine_mode mode
)
2529 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2536 /* If return value is ignored, transform stpcpy into strcpy. */
2537 if (target
== const0_rtx
)
2539 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCPY
];
2543 return expand_expr (build_function_call_expr (fn
, arglist
),
2544 target
, mode
, EXPAND_NORMAL
);
2547 /* Ensure we get an actual string whose length can be evaluated at
2548 compile-time, not an expression containing a string. This is
2549 because the latter will potentially produce pessimized code
2550 when used to produce the return value. */
2551 src
= TREE_VALUE (TREE_CHAIN (arglist
));
2552 if (! c_getstr (src
) || ! (len
= c_strlen (src
)))
2555 len
= fold (size_binop (PLUS_EXPR
, len
, ssize_int (1)));
2556 newarglist
= copy_list (arglist
);
2557 chainon (newarglist
, build_tree_list (NULL_TREE
, len
));
2558 return expand_builtin_mempcpy (newarglist
, target
, mode
, /*endp=*/2);
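/* Illustrative sketch (hypothetical destination `d'): stpcpy returns a
   pointer to the NUL it stores, i.e. dst + strlen (src), while
   mempcpy (dst, src, n) returns dst + n.  With n = strlen (src) + 1 the
   two differ by exactly one, which is why ENDP is 2 (end pointer minus
   one) above; e.g.

       stpcpy (d, "abc") == d + 3 == mempcpy (d, "abc", 4) - 1.  */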
2562 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2563 bytes from constant string DATA + OFFSET and return it as a target constant. */
2567 builtin_strncpy_read_str (void *data
, HOST_WIDE_INT offset
,
2568 enum machine_mode mode
)
2570 const char *str
= (const char *) data
;
2572 if ((unsigned HOST_WIDE_INT
) offset
> strlen (str
))
2575 return c_readstr (str
+ offset
, mode
);
2578 /* Expand expression EXP, which is a call to the strncpy builtin. Return 0
2579 if we failed; the caller should emit a normal call. */
2582 expand_builtin_strncpy (tree arglist
, rtx target
, enum machine_mode mode
)
2584 if (!validate_arglist (arglist
,
2585 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2589 tree slen
= c_strlen (TREE_VALUE (TREE_CHAIN (arglist
)));
2590 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2593 /* We must be passed a constant len parameter. */
2594 if (TREE_CODE (len
) != INTEGER_CST
)
2597 /* If the len parameter is zero, return the dst parameter. */
2598 if (integer_zerop (len
))
2600 /* Evaluate and ignore the src argument in case it has
2602 expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)), const0_rtx
,
2603 VOIDmode
, EXPAND_NORMAL
);
2604 /* Return the dst parameter. */
2605 return expand_expr (TREE_VALUE (arglist
), target
, mode
,
2609 /* Now, we must be passed a constant src ptr parameter. */
2610 if (slen
== 0 || TREE_CODE (slen
) != INTEGER_CST
)
2613 slen
= size_binop (PLUS_EXPR
, slen
, ssize_int (1));
2615 /* We're required to pad with trailing zeros if the requested
2616 len is greater than strlen(s2)+1. In that case try to
2617 use store_by_pieces; if it fails, punt. */
2618 if (tree_int_cst_lt (slen
, len
))
2620 tree dest
= TREE_VALUE (arglist
);
2621 unsigned int dest_align
2622 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2623 const char *p
= c_getstr (TREE_VALUE (TREE_CHAIN (arglist
)));
2626 if (!p
|| dest_align
== 0 || !host_integerp (len
, 1)
2627 || !can_store_by_pieces (tree_low_cst (len
, 1),
2628 builtin_strncpy_read_str
,
2629 (void *) p
, dest_align
))
2632 dest_mem
= get_memory_rtx (dest
);
2633 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
2634 builtin_strncpy_read_str
,
2635 (void *) p
, dest_align
, 0);
2636 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2637 #ifdef POINTERS_EXTEND_UNSIGNED
2638 if (GET_MODE (dest_mem
) != ptr_mode
)
2639 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2644 /* OK, transform into builtin memcpy. */
2645 fn
= implicit_built_in_decls
[BUILT_IN_MEMCPY
];
2648 return expand_expr (build_function_call_expr (fn
, arglist
),
2649 target
, mode
, EXPAND_NORMAL
);
2653 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2654 bytes from constant string DATA + OFFSET and return it as a target constant. */
2658 builtin_memset_read_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
2659 enum machine_mode mode
)
2661 const char *c
= (const char *) data
;
2662 char *p
= alloca (GET_MODE_SIZE (mode
));
2664 memset (p
, *c
, GET_MODE_SIZE (mode
));
2666 return c_readstr (p
, mode
);
2669 /* Callback routine for store_by_pieces. Return the RTL of a register
2670 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2671 char value given in the RTL register data. For example, if mode is
2672 4 bytes wide, return the RTL for 0x01010101*data. */
2675 builtin_memset_gen_str (void *data
, HOST_WIDE_INT offset ATTRIBUTE_UNUSED
,
2676 enum machine_mode mode
)
2682 size
= GET_MODE_SIZE (mode
);
2687 memset (p
, 1, size
);
2688 coeff
= c_readstr (p
, mode
);
2690 target
= convert_to_mode (mode
, (rtx
) data
, 1);
2691 target
= expand_mult (mode
, target
, coeff
, NULL_RTX
, 1);
2692 return force_reg (mode
, target
);
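/* Illustrative arithmetic for the multiplication trick above: in a 4-byte
   mode the coefficient read back from the all-ones buffer is 0x01010101,
   so a byte value such as 0xab is widened to

       0xab * 0x01010101 == 0xabababab

   i.e. the byte is replicated into every byte position of the word.  */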
2695 /* Expand expression EXP, which is a call to the memset builtin. Return 0
2696 if we failed; the caller should emit a normal call, otherwise try to get
2697 the result in TARGET, if convenient (and in mode MODE if that's convenient). */
2701 expand_builtin_memset (tree arglist
, rtx target
, enum machine_mode mode
)
2703 if (!validate_arglist (arglist
,
2704 POINTER_TYPE
, INTEGER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2708 tree dest
= TREE_VALUE (arglist
);
2709 tree val
= TREE_VALUE (TREE_CHAIN (arglist
));
2710 tree len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2713 unsigned int dest_align
2714 = get_pointer_alignment (dest
, BIGGEST_ALIGNMENT
);
2715 rtx dest_mem
, dest_addr
, len_rtx
;
2717 /* If DEST is not a pointer type, don't do this
2718 operation in-line. */
2719 if (dest_align
== 0)
2722 /* If the LEN parameter is zero, return DEST. */
2723 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 0)
2725 /* Evaluate and ignore VAL in case it has side-effects. */
2726 expand_expr (val
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2727 return expand_expr (dest
, target
, mode
, EXPAND_NORMAL
);
2730 if (TREE_CODE (val
) != INTEGER_CST
)
2734 if (!host_integerp (len
, 1))
2737 if (optimize_size
&& tree_low_cst (len
, 1) > 1)
2740 /* Assume that we can memset by pieces if we can store
2741 * the coefficients by pieces (in the required modes).
2742 * We can't pass builtin_memset_gen_str as that emits RTL. */
2744 if (!can_store_by_pieces (tree_low_cst (len
, 1),
2745 builtin_memset_read_str
,
2749 val
= fold (build1 (CONVERT_EXPR
, unsigned_char_type_node
, val
));
2750 val_rtx
= expand_expr (val
, NULL_RTX
, VOIDmode
, 0);
2751 val_rtx
= force_reg (TYPE_MODE (unsigned_char_type_node
),
2753 dest_mem
= get_memory_rtx (dest
);
2754 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
2755 builtin_memset_gen_str
,
2756 val_rtx
, dest_align
, 0);
2757 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2758 #ifdef POINTERS_EXTEND_UNSIGNED
2759 if (GET_MODE (dest_mem
) != ptr_mode
)
2760 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2765 if (target_char_cast (val
, &c
))
2770 if (!host_integerp (len
, 1))
2772 if (!can_store_by_pieces (tree_low_cst (len
, 1),
2773 builtin_memset_read_str
, &c
,
2777 dest_mem
= get_memory_rtx (dest
);
2778 store_by_pieces (dest_mem
, tree_low_cst (len
, 1),
2779 builtin_memset_read_str
,
2781 dest_mem
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2782 #ifdef POINTERS_EXTEND_UNSIGNED
2783 if (GET_MODE (dest_mem
) != ptr_mode
)
2784 dest_mem
= convert_memory_address (ptr_mode
, dest_mem
);
2789 len_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
2791 dest_mem
= get_memory_rtx (dest
);
2792 set_mem_align (dest_mem
, dest_align
);
2793 dest_addr
= clear_storage (dest_mem
, len_rtx
);
2797 dest_addr
= force_operand (XEXP (dest_mem
, 0), NULL_RTX
);
2798 #ifdef POINTERS_EXTEND_UNSIGNED
2799 if (GET_MODE (dest_addr
) != ptr_mode
)
2800 dest_addr
= convert_memory_address (ptr_mode
, dest_addr
);
2808 /* Expand expression EXP, which is a call to the bzero builtin. Return 0
2809 if we failed; the caller should emit a normal call. */
2812 expand_builtin_bzero (tree arglist
)
2814 tree dest
, size
, newarglist
;
2816 if (!validate_arglist (arglist
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2819 dest
= TREE_VALUE (arglist
);
2820 size
= TREE_VALUE (TREE_CHAIN (arglist
));
2822 /* New argument list transforming bzero(ptr x, int y) to
2823 memset(ptr x, int 0, size_t y). This is done this way
2824 so that if it isn't expanded inline, we fall back to
2825 calling bzero instead of memset. */
2827 newarglist
= build_tree_list (NULL_TREE
, convert (sizetype
, size
));
2828 newarglist
= tree_cons (NULL_TREE
, integer_zero_node
, newarglist
);
2829 newarglist
= tree_cons (NULL_TREE
, dest
, newarglist
);
2831 return expand_builtin_memset (newarglist
, const0_rtx
, VOIDmode
);
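/* Illustrative sketch (hypothetical buffer `buf'): the argument list
   constructed above simply realizes

       bzero (buf, n)   as   memset (buf, 0, n);  */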
2834 /* Expand expression EXP, which is a call to the memcmp built-in function.
2835 ARGLIST is the argument list for this call. Return 0 if we failed and the
2836 caller should emit a normal call, otherwise try to get the result in
2837 TARGET, if convenient (and in mode MODE, if that's convenient). */
2840 expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED
, tree arglist
, rtx target
,
2841 enum machine_mode mode
)
2843 tree arg1
, arg2
, len
;
2844 const char *p1
, *p2
;
2846 if (!validate_arglist (arglist
,
2847 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
2850 arg1
= TREE_VALUE (arglist
);
2851 arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
2852 len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
2854 /* If the len parameter is zero, return zero. */
2855 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 0)
2857 /* Evaluate and ignore arg1 and arg2 in case they have side-effects. */
2859 expand_expr (arg1
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2860 expand_expr (arg2
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2864 p1
= c_getstr (arg1
);
2865 p2
= c_getstr (arg2
);
2867 /* If all arguments are constant, and the value of len is not greater
2868 than the lengths of arg1 and arg2, evaluate at compile-time. */
2869 if (host_integerp (len
, 1) && p1
&& p2
2870 && compare_tree_int (len
, strlen (p1
) + 1) <= 0
2871 && compare_tree_int (len
, strlen (p2
) + 1) <= 0)
2873 const int r
= memcmp (p1
, p2
, tree_low_cst (len
, 1));
2875 return (r
< 0 ? constm1_rtx
: (r
> 0 ? const1_rtx
: const0_rtx
));
2878 /* If len parameter is one, return an expression corresponding to
2879 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2880 if (host_integerp (len
, 1) && tree_low_cst (len
, 1) == 1)
2882 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2883 tree cst_uchar_ptr_node
= build_pointer_type (cst_uchar_node
);
2885 fold (build1 (CONVERT_EXPR
, integer_type_node
,
2886 build1 (INDIRECT_REF
, cst_uchar_node
,
2887 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg1
))));
2889 fold (build1 (CONVERT_EXPR
, integer_type_node
,
2890 build1 (INDIRECT_REF
, cst_uchar_node
,
2891 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg2
))));
2892 tree result
= fold (build (MINUS_EXPR
, integer_type_node
, ind1
, ind2
));
2893 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
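/* Illustrative sketch (hypothetical pointers `a' and `b'): with a length
   of one, the comparison above reduces to the difference of the first
   bytes read as unsigned char,

       memcmp (a, b, 1)  ->  (int) ((const unsigned char *) a)[0]
                             - (int) ((const unsigned char *) b)[0]

   whose sign (negative, zero, positive) is a valid memcmp result.  */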
2896 #ifdef HAVE_cmpstrsi
2898 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
2903 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
2905 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
2906 enum machine_mode insn_mode
2907 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
2909 /* If we don't have POINTER_TYPE, call the function. */
2910 if (arg1_align
== 0 || arg2_align
== 0)
2913 /* Make a place to write the result of the instruction. */
2916 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
2917 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
2918 result
= gen_reg_rtx (insn_mode
);
2920 arg1_rtx
= get_memory_rtx (arg1
);
2921 arg2_rtx
= get_memory_rtx (arg2
);
2922 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
2926 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
2927 GEN_INT (MIN (arg1_align
, arg2_align
)));
2932 emit_library_call_value (memcmp_libfunc
, result
, LCT_PURE_MAKE_BLOCK
,
2933 TYPE_MODE (integer_type_node
), 3,
2934 XEXP (arg1_rtx
, 0), Pmode
,
2935 XEXP (arg2_rtx
, 0), Pmode
,
2936 convert_to_mode (TYPE_MODE (sizetype
), arg3_rtx
,
2937 TREE_UNSIGNED (sizetype
)),
2938 TYPE_MODE (sizetype
));
2940 /* Return the value in the proper mode for this function. */
2941 mode
= TYPE_MODE (TREE_TYPE (exp
));
2942 if (GET_MODE (result
) == mode
)
2944 else if (target
!= 0)
2946 convert_move (target
, result
, 0);
2950 return convert_to_mode (mode
, result
, 0);
2957 /* Expand expression EXP, which is a call to the strcmp builtin. Return 0
2958 if we failed; the caller should emit a normal call, otherwise try to get
2959 the result in TARGET, if convenient. */
2962 expand_builtin_strcmp (tree exp
, rtx target
, enum machine_mode mode
)
2964 tree arglist
= TREE_OPERAND (exp
, 1);
2966 const char *p1
, *p2
;
2968 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
2971 arg1
= TREE_VALUE (arglist
);
2972 arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
2974 p1
= c_getstr (arg1
);
2975 p2
= c_getstr (arg2
);
2979 const int i
= strcmp (p1
, p2
);
2980 return (i
< 0 ? constm1_rtx
: (i
> 0 ? const1_rtx
: const0_rtx
));
2983 /* If either arg is "", return an expression corresponding to
2984 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2985 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
2987 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2988 tree cst_uchar_ptr_node
= build_pointer_type (cst_uchar_node
);
2990 fold (build1 (CONVERT_EXPR
, integer_type_node
,
2991 build1 (INDIRECT_REF
, cst_uchar_node
,
2992 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg1
))));
2994 fold (build1 (CONVERT_EXPR
, integer_type_node
,
2995 build1 (INDIRECT_REF
, cst_uchar_node
,
2996 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg2
))));
2997 tree result
= fold (build (MINUS_EXPR
, integer_type_node
, ind1
, ind2
));
2998 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3001 #ifdef HAVE_cmpstrsi
3004 tree len
, len1
, len2
;
3005 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3009 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3011 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3012 enum machine_mode insn_mode
3013 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3015 len1
= c_strlen (arg1
);
3016 len2
= c_strlen (arg2
);
3019 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3021 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3023 /* If we don't have a constant length for the first, use the length
3024 of the second, if we know it. We don't require a constant for
3025 this case; some cost analysis could be done if both are available
3026 but neither is constant. For now, assume they're equally cheap,
3027 unless one has side effects. If both strings have constant lengths, use the smaller. */
3034 else if (TREE_SIDE_EFFECTS (len1
))
3036 else if (TREE_SIDE_EFFECTS (len2
))
3038 else if (TREE_CODE (len1
) != INTEGER_CST
)
3040 else if (TREE_CODE (len2
) != INTEGER_CST
)
3042 else if (tree_int_cst_lt (len1
, len2
))
3047 /* If both arguments have side effects, we cannot optimize. */
3048 if (!len
|| TREE_SIDE_EFFECTS (len
))
3051 /* If we don't have POINTER_TYPE, call the function. */
3052 if (arg1_align
== 0 || arg2_align
== 0)
3055 /* Make a place to write the result of the instruction. */
3058 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
3059 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3060 result
= gen_reg_rtx (insn_mode
);
3062 arg1_rtx
= get_memory_rtx (arg1
);
3063 arg2_rtx
= get_memory_rtx (arg2
);
3064 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3065 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3066 GEN_INT (MIN (arg1_align
, arg2_align
)));
3072 /* Return the value in the proper mode for this function. */
3073 mode
= TYPE_MODE (TREE_TYPE (exp
));
3074 if (GET_MODE (result
) == mode
)
3077 return convert_to_mode (mode
, result
, 0);
3078 convert_move (target
, result
, 0);
3085 /* Expand expression EXP, which is a call to the strncmp builtin. Return 0
3086 if we failed; the caller should emit a normal call, otherwise try to get
3087 the result in TARGET, if convenient. */
3090 expand_builtin_strncmp (tree exp
, rtx target
, enum machine_mode mode
)
3092 tree arglist
= TREE_OPERAND (exp
, 1);
3093 tree arg1
, arg2
, arg3
;
3094 const char *p1
, *p2
;
3096 if (!validate_arglist (arglist
,
3097 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3100 arg1
= TREE_VALUE (arglist
);
3101 arg2
= TREE_VALUE (TREE_CHAIN (arglist
));
3102 arg3
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3104 /* If the len parameter is zero, return zero. */
3105 if (host_integerp (arg3
, 1) && tree_low_cst (arg3
, 1) == 0)
3107 /* Evaluate and ignore arg1 and arg2 in case they have side-effects. */
3109 expand_expr (arg1
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3110 expand_expr (arg2
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3114 p1
= c_getstr (arg1
);
3115 p2
= c_getstr (arg2
);
3117 /* If all arguments are constant, evaluate at compile-time. */
3118 if (host_integerp (arg3
, 1) && p1
&& p2
)
3120 const int r
= strncmp (p1
, p2
, tree_low_cst (arg3
, 1));
3121 return (r
< 0 ? constm1_rtx
: (r
> 0 ? const1_rtx
: const0_rtx
));
3124 /* If len == 1 or (either string parameter is "" and (len >= 1)),
3125 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
3126 if (host_integerp (arg3
, 1)
3127 && (tree_low_cst (arg3
, 1) == 1
3128 || (tree_low_cst (arg3
, 1) > 1
3129 && ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0')))))
3131 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
3132 tree cst_uchar_ptr_node
= build_pointer_type (cst_uchar_node
);
3134 fold (build1 (CONVERT_EXPR
, integer_type_node
,
3135 build1 (INDIRECT_REF
, cst_uchar_node
,
3136 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg1
))));
3138 fold (build1 (CONVERT_EXPR
, integer_type_node
,
3139 build1 (INDIRECT_REF
, cst_uchar_node
,
3140 build1 (NOP_EXPR
, cst_uchar_ptr_node
, arg2
))));
3141 tree result
= fold (build (MINUS_EXPR
, integer_type_node
, ind1
, ind2
));
3142 return expand_expr (result
, target
, mode
, EXPAND_NORMAL
);
3145 /* If c_strlen can determine an expression for one of the string
3146 lengths, and it doesn't have side effects, then emit cmpstrsi
3147 using length MIN(strlen(string)+1, arg3). */
3148 #ifdef HAVE_cmpstrsi
3151 tree len
, len1
, len2
;
3152 rtx arg1_rtx
, arg2_rtx
, arg3_rtx
;
3156 = get_pointer_alignment (arg1
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3158 = get_pointer_alignment (arg2
, BIGGEST_ALIGNMENT
) / BITS_PER_UNIT
;
3159 enum machine_mode insn_mode
3160 = insn_data
[(int) CODE_FOR_cmpstrsi
].operand
[0].mode
;
3162 len1
= c_strlen (arg1
);
3163 len2
= c_strlen (arg2
);
3166 len1
= size_binop (PLUS_EXPR
, ssize_int (1), len1
);
3168 len2
= size_binop (PLUS_EXPR
, ssize_int (1), len2
);
3170 /* If we don't have a constant length for the first, use the length
3171 of the second, if we know it. We don't require a constant for
3172 this case; some cost analysis could be done if both are available
3173 but neither is constant. For now, assume they're equally cheap,
3174 unless one has side effects. If both strings have constant lengths, use the smaller. */
3181 else if (TREE_SIDE_EFFECTS (len1
))
3183 else if (TREE_SIDE_EFFECTS (len2
))
3185 else if (TREE_CODE (len1
) != INTEGER_CST
)
3187 else if (TREE_CODE (len2
) != INTEGER_CST
)
3189 else if (tree_int_cst_lt (len1
, len2
))
3194 /* If both arguments have side effects, we cannot optimize. */
3195 if (!len
|| TREE_SIDE_EFFECTS (len
))
3198 /* The actual new length parameter is MIN(len,arg3). */
3199 len
= fold (build (MIN_EXPR
, TREE_TYPE (len
), len
, arg3
));
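/* Illustrative sketch (hypothetical string `s'): once one string length is
   known, comparing past its terminating NUL cannot change the result, so
   the compare length just computed is MIN (strlen (s) + 1, n); e.g.

       strncmp (s, "ab", 100)

   needs to inspect at most 3 bytes (both characters plus the NUL).  */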
3201 /* If we don't have POINTER_TYPE, call the function. */
3202 if (arg1_align
== 0 || arg2_align
== 0)
3205 /* Make a place to write the result of the instruction. */
3208 && GET_CODE (result
) == REG
&& GET_MODE (result
) == insn_mode
3209 && REGNO (result
) >= FIRST_PSEUDO_REGISTER
))
3210 result
= gen_reg_rtx (insn_mode
);
3212 arg1_rtx
= get_memory_rtx (arg1
);
3213 arg2_rtx
= get_memory_rtx (arg2
);
3214 arg3_rtx
= expand_expr (len
, NULL_RTX
, VOIDmode
, 0);
3215 insn
= gen_cmpstrsi (result
, arg1_rtx
, arg2_rtx
, arg3_rtx
,
3216 GEN_INT (MIN (arg1_align
, arg2_align
)));
3222 /* Return the value in the proper mode for this function. */
3223 mode
= TYPE_MODE (TREE_TYPE (exp
));
3224 if (GET_MODE (result
) == mode
)
3227 return convert_to_mode (mode
, result
, 0);
3228 convert_move (target
, result
, 0);
3235 /* Expand expression EXP, which is a call to the strcat builtin.
3236 Return 0 if we failed; the caller should emit a normal call,
3237 otherwise try to get the result in TARGET, if convenient. */
3240 expand_builtin_strcat (tree arglist
, rtx target
, enum machine_mode mode
)
3242 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3246 tree dst
= TREE_VALUE (arglist
),
3247 src
= TREE_VALUE (TREE_CHAIN (arglist
));
3248 const char *p
= c_getstr (src
);
3250 /* If the string length is zero, return the dst parameter. */
3251 if (p
&& *p
== '\0')
3252 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
3258 /* Expand expression EXP, which is a call to the strncat builtin.
3259 Return 0 if we failed; the caller should emit a normal call,
3260 otherwise try to get the result in TARGET, if convenient. */
3263 expand_builtin_strncat (tree arglist
, rtx target
, enum machine_mode mode
)
3265 if (!validate_arglist (arglist
,
3266 POINTER_TYPE
, POINTER_TYPE
, INTEGER_TYPE
, VOID_TYPE
))
3270 tree dst
= TREE_VALUE (arglist
),
3271 src
= TREE_VALUE (TREE_CHAIN (arglist
)),
3272 len
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
3273 const char *p
= c_getstr (src
);
3275 /* If the requested length is zero, or the src parameter string
3276 length is zero, return the dst parameter. */
3277 if (integer_zerop (len
) || (p
&& *p
== '\0'))
3279 /* Evaluate and ignore the src and len parameters in case
3280 they have side-effects. */
3281 expand_expr (src
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3282 expand_expr (len
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3283 return expand_expr (dst
, target
, mode
, EXPAND_NORMAL
);
3286 /* If the requested len is greater than or equal to the string
3287 length, call strcat. */
3288 if (TREE_CODE (len
) == INTEGER_CST
&& p
3289 && compare_tree_int (len
, strlen (p
)) >= 0)
3292 = tree_cons (NULL_TREE
, dst
, build_tree_list (NULL_TREE
, src
));
3293 tree fn
= implicit_built_in_decls
[BUILT_IN_STRCAT
];
3295 /* If the replacement _DECL isn't initialized, don't do the transformation. */
3300 return expand_expr (build_function_call_expr (fn
, newarglist
),
3301 target
, mode
, EXPAND_NORMAL
);
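/* Illustrative sketch (hypothetical destination `d'): when the count is at
   least the source length, strncat appends the whole source string plus
   the terminating NUL, exactly as strcat does, so the rewrite above treats

       strncat (d, "hi", 5)   as   strcat (d, "hi");  */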
3307 /* Expand expression EXP, which is a call to the strspn builtin.
3308 Return 0 if we failed; the caller should emit a normal call,
3309 otherwise try to get the result in TARGET, if convenient. */
3312 expand_builtin_strspn (tree arglist
, rtx target
, enum machine_mode mode
)
3314 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3318 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
3319 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
3321 /* If both arguments are constants, evaluate at compile-time. */
3324 const size_t r
= strspn (p1
, p2
);
3325 return expand_expr (size_int (r
), target
, mode
, EXPAND_NORMAL
);
3328 /* If either argument is "", return 0. */
3329 if ((p1
&& *p1
== '\0') || (p2
&& *p2
== '\0'))
3331 /* Evaluate and ignore both arguments in case either one has side-effects. */
3333 expand_expr (s1
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3334 expand_expr (s2
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3341 /* Expand expression EXP, which is a call to the strcspn builtin.
3342 Return 0 if we failed; the caller should emit a normal call,
3343 otherwise try to get the result in TARGET, if convenient. */
3346 expand_builtin_strcspn (tree arglist
, rtx target
, enum machine_mode mode
)
3348 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3352 tree s1
= TREE_VALUE (arglist
), s2
= TREE_VALUE (TREE_CHAIN (arglist
));
3353 const char *p1
= c_getstr (s1
), *p2
= c_getstr (s2
);
3355 /* If both arguments are constants, evaluate at compile-time. */
3358 const size_t r
= strcspn (p1
, p2
);
3359 return expand_expr (size_int (r
), target
, mode
, EXPAND_NORMAL
);
3362 /* If the first argument is "", return 0. */
3363 if (p1
&& *p1
== '\0')
3365 /* Evaluate and ignore argument s2 in case it has side-effects. */
3367 expand_expr (s2
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3371 /* If the second argument is "", return __builtin_strlen(s1). */
3372 if (p2
&& *p2
== '\0')
3374 tree newarglist
= build_tree_list (NULL_TREE
, s1
),
3375 fn
= implicit_built_in_decls
[BUILT_IN_STRLEN
];
3377 /* If the replacement _DECL isn't initialized, don't do the transformation. */
3382 return expand_expr (build_function_call_expr (fn
, newarglist
),
3383 target
, mode
, EXPAND_NORMAL
);
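/* Illustrative note: strcspn (s1, "") counts the leading characters of S1
   that are not in the (empty) reject set, which is all of them, so the
   result is simply strlen (s1); that is what the __builtin_strlen call
   built just above computes.  */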
3389 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3390 if that's convenient. */
3393 expand_builtin_saveregs (void)
3397 /* Don't do __builtin_saveregs more than once in a function.
3398 Save the result of the first call and reuse it. */
3399 if (saveregs_value
!= 0)
3400 return saveregs_value
;
3402 /* When this function is called, it means that registers must be
3403 saved on entry to this function. So we migrate the call to the
3404 first insn of this function. */
3408 #ifdef EXPAND_BUILTIN_SAVEREGS
3409 /* Do whatever the machine needs done in this case. */
3410 val
= EXPAND_BUILTIN_SAVEREGS ();
3412 /* ??? We used to try and build up a call to the out of line function,
3413 guessing about what registers needed saving etc. This became much
3414 harder with __builtin_va_start, since we don't have a tree for a
3415 call to __builtin_saveregs to fall back on. There was exactly one
3416 port (i860) that used this code, and I'm unconvinced it could actually
3417 handle the general case. So we no longer try to handle anything
3418 weird and make the backend absorb the evil. */
3420 error ("__builtin_saveregs not supported by this target");
3427 saveregs_value
= val
;
3429 /* Put the insns after the NOTE that starts the function. If this
3430 is inside a start_sequence, make the outer-level insn chain current, so
3431 the code is placed at the start of the function. */
3432 push_topmost_sequence ();
3433 emit_insn_after (seq
, get_insns ());
3434 pop_topmost_sequence ();
3439 /* __builtin_args_info (N) returns word N of the arg space info
3440 for the current function. The number and meanings of words
3441 are controlled by the definition of CUMULATIVE_ARGS. */
3444 expand_builtin_args_info (tree arglist
)
3446 int nwords
= sizeof (CUMULATIVE_ARGS
) / sizeof (int);
3447 int *word_ptr
= (int *) ¤t_function_args_info
;
3449 if (sizeof (CUMULATIVE_ARGS
) % sizeof (int) != 0)
3454 if (!host_integerp (TREE_VALUE (arglist
), 0))
3455 error ("argument of `__builtin_args_info' must be constant");
3458 HOST_WIDE_INT wordnum
= tree_low_cst (TREE_VALUE (arglist
), 0);
3460 if (wordnum
< 0 || wordnum
>= nwords
)
3461 error ("argument of `__builtin_args_info' out of range");
3463 return GEN_INT (word_ptr
[wordnum
]);
3467 error ("missing argument in `__builtin_args_info'");
3472 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3475 expand_builtin_next_arg (tree arglist
)
3477 tree fntype
= TREE_TYPE (current_function_decl
);
3479 if (TYPE_ARG_TYPES (fntype
) == 0
3480 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype
)))
3483 error ("`va_start' used in function with fixed args");
3489 tree last_parm
= tree_last (DECL_ARGUMENTS (current_function_decl
));
3490 tree arg
= TREE_VALUE (arglist
);
3492 /* Strip off all nops for the sake of the comparison. This
3493 is not quite the same as STRIP_NOPS. It does more.
3494 We must also strip off INDIRECT_EXPR for C++ reference parameters. */
3496 while (TREE_CODE (arg
) == NOP_EXPR
3497 || TREE_CODE (arg
) == CONVERT_EXPR
3498 || TREE_CODE (arg
) == NON_LVALUE_EXPR
3499 || TREE_CODE (arg
) == INDIRECT_REF
)
3500 arg
= TREE_OPERAND (arg
, 0);
3501 if (arg
!= last_parm
)
3502 warning ("second parameter of `va_start' not last named argument");
3505 /* Evidently an out of date version of <stdarg.h>; can't validate
3506 va_start's second argument, but can still work as intended. */
3507 warning ("`__builtin_next_arg' called without an argument");
3509 return expand_binop (Pmode
, add_optab
,
3510 current_function_internal_arg_pointer
,
3511 current_function_arg_offset_rtx
,
3512 NULL_RTX
, 0, OPTAB_LIB_WIDEN
);
3515 /* Make it easier for the backends by protecting the valist argument
3516 from multiple evaluations. */
3519 stabilize_va_list (tree valist
, int needs_lvalue
)
3521 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
3523 if (TREE_SIDE_EFFECTS (valist
))
3524 valist
= save_expr (valist
);
3526 /* For this case, the backends will be expecting a pointer to
3527 TREE_TYPE (va_list_type_node), but it's possible we've
3528 actually been given an array (an actual va_list_type_node). So fix it. */
3530 if (TREE_CODE (TREE_TYPE (valist
)) == ARRAY_TYPE
)
3532 tree p1
= build_pointer_type (TREE_TYPE (va_list_type_node
));
3533 tree p2
= build_pointer_type (va_list_type_node
);
3535 valist
= build1 (ADDR_EXPR
, p2
, valist
);
3536 valist
= fold (build1 (NOP_EXPR
, p1
, valist
));
3545 if (! TREE_SIDE_EFFECTS (valist
))
3548 pt
= build_pointer_type (va_list_type_node
);
3549 valist
= fold (build1 (ADDR_EXPR
, pt
, valist
));
3550 TREE_SIDE_EFFECTS (valist
) = 1;
3553 if (TREE_SIDE_EFFECTS (valist
))
3554 valist
= save_expr (valist
);
3555 valist
= fold (build1 (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (valist
)),
3562 /* The "standard" implementation of va_start: just assign `nextarg' to the variable. */
3566 std_expand_builtin_va_start (tree valist
, rtx nextarg
)
3570 t
= build (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
3571 make_tree (ptr_type_node
, nextarg
));
3572 TREE_SIDE_EFFECTS (t
) = 1;
3574 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3577 /* Expand ARGLIST, from a call to __builtin_va_start. */
3580 expand_builtin_va_start (tree arglist
)
3585 chain
= TREE_CHAIN (arglist
);
3587 if (TREE_CHAIN (chain
))
3588 error ("too many arguments to function `va_start'");
3590 nextarg
= expand_builtin_next_arg (chain
);
3591 valist
= stabilize_va_list (TREE_VALUE (arglist
), 1);
3593 #ifdef EXPAND_BUILTIN_VA_START
3594 EXPAND_BUILTIN_VA_START (valist
, nextarg
);
3596 std_expand_builtin_va_start (valist
, nextarg
);
3602 /* The "standard" implementation of va_arg: read the value from the
3603 current (padded) address and increment by the (padded) size. */
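/* Illustrative arithmetic for the size rounding below: the increment is
   the type size rounded up to the parameter alignment,

       rounded_size = ((type_size + align - 1) / align) * align

   so with a PARM_BOUNDARY of 64 bits (align of 8 bytes, a hypothetical
   target value) a 5-byte argument type advances the va_list pointer by 8
   bytes.  */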
3606 std_expand_builtin_va_arg (tree valist
, tree type
)
3608 tree addr_tree
, t
, type_size
= NULL
;
3609 tree align
, alignm1
;
3613 /* Compute the rounded size of the type. */
3614 align
= size_int (PARM_BOUNDARY
/ BITS_PER_UNIT
);
3615 alignm1
= size_int (PARM_BOUNDARY
/ BITS_PER_UNIT
- 1);
3616 if (type
== error_mark_node
3617 || (type_size
= TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type
))) == NULL
3618 || TREE_OVERFLOW (type_size
))
3619 rounded_size
= size_zero_node
;
3621 rounded_size
= fold (build (MULT_EXPR
, sizetype
,
3622 fold (build (TRUNC_DIV_EXPR
, sizetype
,
3623 fold (build (PLUS_EXPR
, sizetype
,
3624 type_size
, alignm1
)),
3630 if (PAD_VARARGS_DOWN
&& ! integer_zerop (rounded_size
))
3632 /* Small args are padded downward. */
3633 addr_tree
= fold (build (PLUS_EXPR
, TREE_TYPE (addr_tree
), addr_tree
,
3634 fold (build (COND_EXPR
, sizetype
,
3635 fold (build (GT_EXPR
, sizetype
,
3639 fold (build (MINUS_EXPR
, sizetype
,
3644 addr
= expand_expr (addr_tree
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
3645 addr
= copy_to_reg (addr
);
3647 /* Compute new value for AP. */
3648 if (! integer_zerop (rounded_size
))
3650 t
= build (MODIFY_EXPR
, TREE_TYPE (valist
), valist
,
3651 build (PLUS_EXPR
, TREE_TYPE (valist
), valist
,
3653 TREE_SIDE_EFFECTS (t
) = 1;
3654 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3660 /* Expand __builtin_va_arg, which is not really a builtin function, but
3661 a very special sort of operator. */
3664 expand_builtin_va_arg (tree valist
, tree type
)
3667 tree promoted_type
, want_va_type
, have_va_type
;
3669 /* Verify that valist is of the proper type. */
3671 want_va_type
= va_list_type_node
;
3672 have_va_type
= TREE_TYPE (valist
);
3673 if (TREE_CODE (want_va_type
) == ARRAY_TYPE
)
3675 /* If va_list is an array type, the argument may have decayed
3676 to a pointer type, e.g. by being passed to another function.
3677 In that case, unwrap both types so that we can compare the
3678 underlying records. */
3679 if (TREE_CODE (have_va_type
) == ARRAY_TYPE
3680 || TREE_CODE (have_va_type
) == POINTER_TYPE
)
3682 want_va_type
= TREE_TYPE (want_va_type
);
3683 have_va_type
= TREE_TYPE (have_va_type
);
3686 if (TYPE_MAIN_VARIANT (want_va_type
) != TYPE_MAIN_VARIANT (have_va_type
))
3688 error ("first argument to `va_arg' not of type `va_list'");
3692 /* Generate a diagnostic for requesting data of a type that cannot
3693 be passed through `...' due to type promotion at the call site. */
3694 else if ((promoted_type
= (*lang_hooks
.types
.type_promotes_to
) (type
))
3697 const char *name
= "<anonymous type>", *pname
= 0;
3698 static bool gave_help
;
3700 if (TYPE_NAME (type
))
3702 if (TREE_CODE (TYPE_NAME (type
)) == IDENTIFIER_NODE
)
3703 name
= IDENTIFIER_POINTER (TYPE_NAME (type
));
3704 else if (TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
3705 && DECL_NAME (TYPE_NAME (type
)))
3706 name
= IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
)));
3708 if (TYPE_NAME (promoted_type
))
3710 if (TREE_CODE (TYPE_NAME (promoted_type
)) == IDENTIFIER_NODE
)
3711 pname
= IDENTIFIER_POINTER (TYPE_NAME (promoted_type
));
3712 else if (TREE_CODE (TYPE_NAME (promoted_type
)) == TYPE_DECL
3713 && DECL_NAME (TYPE_NAME (promoted_type
)))
3714 pname
= IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type
)));
3717 /* Unfortunately, this is merely undefined, rather than a constraint
3718 violation, so we cannot make this an error. If this call is never
3719 executed, the program is still strictly conforming. */
3720 warning ("`%s' is promoted to `%s' when passed through `...'",
3725 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3729 /* We can, however, treat "undefined" any way we please.
3730 Call abort to encourage the user to fix the program. */
3731 expand_builtin_trap ();
3733 /* This is dead code, but go ahead and finish so that the
3734 mode of the result comes out right. */
3739 /* Make it easier for the backends by protecting the valist argument
3740 from multiple evaluations. */
3741 valist
= stabilize_va_list (valist
, 0);
3743 #ifdef EXPAND_BUILTIN_VA_ARG
3744 addr
= EXPAND_BUILTIN_VA_ARG (valist
, type
);
3746 addr
= std_expand_builtin_va_arg (valist
, type
);
3750 #ifdef POINTERS_EXTEND_UNSIGNED
3751 if (GET_MODE (addr
) != Pmode
)
3752 addr
= convert_memory_address (Pmode
, addr
);
3755 result
= gen_rtx_MEM (TYPE_MODE (type
), addr
);
3756 set_mem_alias_set (result
, get_varargs_alias_set ());
3761 /* Expand ARGLIST, from a call to __builtin_va_end. */
3764 expand_builtin_va_end (tree arglist
)
3766 tree valist
= TREE_VALUE (arglist
);
3768 #ifdef EXPAND_BUILTIN_VA_END
3769 valist
= stabilize_va_list (valist
, 0);
3770 EXPAND_BUILTIN_VA_END (arglist
);
3772 /* Evaluate for side effects, if needed. I hate macros that don't do that. */
3774 if (TREE_SIDE_EFFECTS (valist
))
3775 expand_expr (valist
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3781 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3782 builtin rather than just as an assignment in stdarg.h because of the
3783 nastiness of array-type va_list types. */
3786 expand_builtin_va_copy (tree arglist
)
3790 dst
= TREE_VALUE (arglist
);
3791 src
= TREE_VALUE (TREE_CHAIN (arglist
));
3793 dst
= stabilize_va_list (dst
, 1);
3794 src
= stabilize_va_list (src
, 0);
3796 if (TREE_CODE (va_list_type_node
) != ARRAY_TYPE
)
3798 t
= build (MODIFY_EXPR
, va_list_type_node
, dst
, src
);
3799 TREE_SIDE_EFFECTS (t
) = 1;
3800 expand_expr (t
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3804 rtx dstb
, srcb
, size
;
3806 /* Evaluate to pointers. */
3807 dstb
= expand_expr (dst
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
3808 srcb
= expand_expr (src
, NULL_RTX
, Pmode
, EXPAND_NORMAL
);
3809 size
= expand_expr (TYPE_SIZE_UNIT (va_list_type_node
), NULL_RTX
,
3810 VOIDmode
, EXPAND_NORMAL
);
3812 #ifdef POINTERS_EXTEND_UNSIGNED
3813 if (GET_MODE (dstb
) != Pmode
)
3814 dstb
= convert_memory_address (Pmode
, dstb
);
3816 if (GET_MODE (srcb
) != Pmode
)
3817 srcb
= convert_memory_address (Pmode
, srcb
);
3820 /* "Dereference" to BLKmode memories. */
3821 dstb
= gen_rtx_MEM (BLKmode
, dstb
);
3822 set_mem_alias_set (dstb
, get_alias_set (TREE_TYPE (TREE_TYPE (dst
))));
3823 set_mem_align (dstb
, TYPE_ALIGN (va_list_type_node
));
3824 srcb
= gen_rtx_MEM (BLKmode
, srcb
);
3825 set_mem_alias_set (srcb
, get_alias_set (TREE_TYPE (TREE_TYPE (src
))));
3826 set_mem_align (srcb
, TYPE_ALIGN (va_list_type_node
));
3829 emit_block_move (dstb
, srcb
, size
, BLOCK_OP_NORMAL
);
3835 /* Expand a call to one of the builtin functions __builtin_frame_address or
3836 __builtin_return_address. */
3839 expand_builtin_frame_address (tree fndecl
, tree arglist
)
3841 /* The argument must be a nonnegative integer constant.
3842 It counts the number of frames to scan up the stack.
3843 The value is the return address saved in that frame. */
3845 /* Warning about missing arg was already issued. */
3847 else if (! host_integerp (TREE_VALUE (arglist
), 1))
3849 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
3850 error ("invalid arg to `__builtin_frame_address'");
3852 error ("invalid arg to `__builtin_return_address'");
3858 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl
),
3859 tree_low_cst (TREE_VALUE (arglist
), 1),
3860 hard_frame_pointer_rtx
);
3862 /* Some ports cannot access arbitrary stack frames. */
3865 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
3866 warning ("unsupported arg to `__builtin_frame_address'");
3868 warning ("unsupported arg to `__builtin_return_address'");
3872 /* For __builtin_frame_address, return what we've got. */
3873 if (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_FRAME_ADDRESS
)
3876 if (GET_CODE (tem
) != REG
3877 && ! CONSTANT_P (tem
))
3878 tem
= copy_to_mode_reg (Pmode
, tem
);
3883 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3884 we failed and the caller should emit a normal call, otherwise try to get
3885 the result in TARGET, if convenient. */
3888 expand_builtin_alloca (tree arglist
, rtx target
)
3893 if (!validate_arglist (arglist
, INTEGER_TYPE
, VOID_TYPE
))
3896 /* Compute the argument. */
3897 op0
= expand_expr (TREE_VALUE (arglist
), NULL_RTX
, VOIDmode
, 0);
3899 /* Allocate the desired space. */
3900 result
= allocate_dynamic_stack_space (op0
, target
, BITS_PER_UNIT
);
3902 #ifdef POINTERS_EXTEND_UNSIGNED
3903 if (GET_MODE (result
) != ptr_mode
)
3904 result
= convert_memory_address (ptr_mode
, result
);
3910 /* Expand a call to a unary builtin. The arguments are in ARGLIST.
3911 Return 0 if a normal call should be emitted rather than expanding the
3912 function in-line. If convenient, the result should be placed in TARGET.
3913 SUBTARGET may be used as the target for computing one of EXP's operands. */
3916 expand_builtin_unop (enum machine_mode target_mode
, tree arglist
, rtx target
,
3917 rtx subtarget
, optab op_optab
)
3920 if (!validate_arglist (arglist
, INTEGER_TYPE
, VOID_TYPE
))
3923 /* Compute the argument. */
3924 op0
= expand_expr (TREE_VALUE (arglist
), subtarget
, VOIDmode
, 0);
3925 /* Compute op, into TARGET if possible.
3926 Set TARGET to wherever the result comes back. */
3927 target
= expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist
))),
3928 op_optab
, op0
, target
, 1);
3932 return convert_to_mode (target_mode
, target
, 0);
3935 /* If the string passed to fputs is a constant and is one character
3936 long, we attempt to transform this call into __builtin_fputc(). */
3939 expand_builtin_fputs (tree arglist
, int ignore
, int unlocked
)
3942 tree fn_fputc
= unlocked
? implicit_built_in_decls
[BUILT_IN_FPUTC_UNLOCKED
]
3943 : implicit_built_in_decls
[BUILT_IN_FPUTC
];
3944 tree fn_fwrite
= unlocked
? implicit_built_in_decls
[BUILT_IN_FWRITE_UNLOCKED
]
3945 : implicit_built_in_decls
[BUILT_IN_FWRITE
];
3947 /* If the return value is used, or the replacement _DECL isn't
3948 initialized, don't do the transformation. */
3949 if (!ignore
|| !fn_fputc
|| !fn_fwrite
)
3952 /* Verify the arguments in the original call. */
3953 if (!validate_arglist (arglist
, POINTER_TYPE
, POINTER_TYPE
, VOID_TYPE
))
3956 /* Get the length of the string passed to fputs. If the length
3957 can't be determined, punt. */
3958 if (!(len
= c_strlen (TREE_VALUE (arglist
)))
3959 || TREE_CODE (len
) != INTEGER_CST
)
3962 switch (compare_tree_int (len
, 1))
3964 case -1: /* length is 0, delete the call entirely. */
3966 /* Evaluate and ignore the argument in case it has side-effects. */
3968 expand_expr (TREE_VALUE (TREE_CHAIN (arglist
)), const0_rtx
,
3969 VOIDmode
, EXPAND_NORMAL
);
3972 case 0: /* length is 1, call fputc. */
3974 const char *p
= c_getstr (TREE_VALUE (arglist
));
3978 /* New argument list transforming fputs(string, stream) to
3979 fputc(string[0], stream). */
3981 build_tree_list (NULL_TREE
, TREE_VALUE (TREE_CHAIN (arglist
)));
3983 tree_cons (NULL_TREE
, build_int_2 (p
[0], 0), arglist
);
3989 case 1: /* length is greater than 1, call fwrite. */
3993 /* If optimizing for size, keep fputs. */
3996 string_arg
= TREE_VALUE (arglist
);
3997 /* New argument list transforming fputs(string, stream) to
3998 fwrite(string, 1, len, stream). */
3999 arglist
= build_tree_list (NULL_TREE
, TREE_VALUE (TREE_CHAIN (arglist
)));
4000 arglist
= tree_cons (NULL_TREE
, len
, arglist
);
4001 arglist
= tree_cons (NULL_TREE
, size_one_node
, arglist
);
4002 arglist
= tree_cons (NULL_TREE
, string_arg
, arglist
);
4010 return expand_expr (build_function_call_expr (fn
, arglist
),
4011 (ignore
? const0_rtx
: NULL_RTX
),
4012 VOIDmode
, EXPAND_NORMAL
);
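/* Illustrative sketch (hypothetical stream `f'): the two transformations
   above rewrite

       fputs ("x", f)       as   fputc ('x', f);
       fputs ("hello", f)   as   fwrite ("hello", 1, 5, f);

   a one-character constant string becomes a single fputc, and a longer
   constant string of known length becomes a single fwrite.  */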
4015 /* Expand a call to __builtin_expect. We return our argument and emit a
4016 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
4017 a non-jump context. */
4020 expand_builtin_expect (tree arglist
, rtx target
)
4025 if (arglist
== NULL_TREE
4026 || TREE_CHAIN (arglist
) == NULL_TREE
)
4028 exp
= TREE_VALUE (arglist
);
4029 c
= TREE_VALUE (TREE_CHAIN (arglist
));
4031 if (TREE_CODE (c
) != INTEGER_CST
)
4033 error ("second arg to `__builtin_expect' must be a constant");
4034 c
= integer_zero_node
;
4037 target
= expand_expr (exp
, target
, VOIDmode
, EXPAND_NORMAL
);
4039 /* Don't bother with expected value notes for integral constants. */
4040 if (flag_guess_branch_prob
&& GET_CODE (target
) != CONST_INT
)
4042 /* We do need to force this into a register so that we can be
4043 moderately sure to be able to correctly interpret the branch condition later. */
4045 target
= force_reg (GET_MODE (target
), target
);
4047 rtx_c
= expand_expr (c
, NULL_RTX
, GET_MODE (target
), EXPAND_NORMAL
);
4049 note
= emit_note (NULL
, NOTE_INSN_EXPECTED_VALUE
);
4050 NOTE_EXPECTED_VALUE (note
) = gen_rtx_EQ (VOIDmode
, target
, rtx_c
);
/* Like expand_builtin_expect, except do this in a jump context.  This is
   called from do_jump if the conditional is a __builtin_expect.  Return either
   a list of insns to emit the jump or NULL if we cannot optimize
   __builtin_expect.  We need to optimize this at jump time so that machines
   like the PowerPC don't turn the test into a SCC operation, and then jump
   based on the test being 0/1.  */

rtx
expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  tree arglist = TREE_OPERAND (exp, 1);
  tree arg0 = TREE_VALUE (arglist);
  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
  rtx ret = NULL_RTX;

  /* Only handle __builtin_expect (test, 0) and
     __builtin_expect (test, 1).  */
  if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
      && (integer_zerop (arg1) || integer_onep (arg1)))
    {
      rtx insn;
      int num_jumps = 0;

      /* If we fail to locate an appropriate conditional jump, we'll
         fall back to normal evaluation.  Ensure that the expression
         can be re-evaluated.  */
      switch (unsafe_for_reeval (arg0))
        {
        case 0: /* Safe.  */
          break;

        case 1: /* Mildly unsafe.  */
          arg0 = unsave_expr (arg0);
          break;

        case 2: /* Wildly unsafe.  */
          return NULL_RTX;
        }

      /* Expand the jump insns.  */
      start_sequence ();
      do_jump (arg0, if_false_label, if_true_label);
      ret = get_insns ();
      end_sequence ();

      /* Now that the __builtin_expect has been validated, go through and add
         the expect's to each of the conditional jumps.  If we run into an
         error, just give up and generate the 'safe' code of doing a SCC
         operation and then doing a branch on that.  */
      insn = ret;
      while (insn != NULL_RTX)
        {
          rtx next = NEXT_INSN (insn);

          if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn))
            {
              rtx ifelse = SET_SRC (pc_set (insn));
              rtx label;
              int taken;

              if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
                {
                  taken = 1;
                  label = XEXP (XEXP (ifelse, 1), 0);
                }
              /* An inverted jump reverses the probabilities.  */
              else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
                {
                  taken = 0;
                  label = XEXP (XEXP (ifelse, 2), 0);
                }
              /* We shouldn't have to worry about conditional returns during
                 the expansion stage, but handle it gracefully anyway.  */
              else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
                {
                  taken = 1;
                  label = NULL_RTX;
                }
              /* An inverted return reverses the probabilities.  */
              else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
                {
                  taken = 0;
                  label = NULL_RTX;
                }
              else
                goto do_next_insn;

              /* If the test is expected to fail, reverse the
                 probabilities.  */
              if (integer_zerop (arg1))
                taken = 1 - taken;

              /* If we are jumping to the false label, reverse the
                 probabilities.  */
              if (label == NULL_RTX)
                ;               /* conditional return */
              else if (label == if_false_label)
                taken = 1 - taken;
              else if (label != if_true_label)
                goto do_next_insn;

              num_jumps++;
              predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
            }

        do_next_insn:
          insn = next;
        }

      /* If no jumps were modified, fail and do __builtin_expect the normal
         way.  */
      if (num_jumps == 0)
        ret = NULL_RTX;
    }

  return ret;
}
/* Expand a call to __builtin_trap.  */

static void
expand_builtin_trap (void)
{
#ifdef HAVE_trap
  if (HAVE_trap)
    emit_insn (gen_trap ());
  else
#endif
    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
}
/* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
   Return 0 if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    return 0;

  arg = TREE_VALUE (arglist);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, 0);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
/* Expand a call to cabs, cabsf or cabsl with arguments ARGLIST.
   Return 0 if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  */

static rtx
expand_builtin_cabs (tree arglist, rtx target)
{
  enum machine_mode mode;
  tree arg;
  rtx op0;

  if (arglist == 0 || TREE_CHAIN (arglist))
    return 0;
  arg = TREE_VALUE (arglist);
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return 0;

  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
  return expand_complex_abs (mode, op0, target, 0);
}
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

rtx
expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
                int ignore)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));

  /* Perform postincrements before expanding builtin functions.  */
  emit_queue ();

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
  /* When not optimizing, generate calls to library functions for a certain
     set of builtins.  */
  if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
    switch (fcode)
      {
      case BUILT_IN_SQRTF:
      case BUILT_IN_SQRTL:
      case BUILT_IN_ATANF:
      case BUILT_IN_ATANL:
      case BUILT_IN_ATAN2:
      case BUILT_IN_ATAN2F:
      case BUILT_IN_ATAN2L:
      case BUILT_IN_MEMSET:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMCMP:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_BZERO:
      case BUILT_IN_BCOPY:
      case BUILT_IN_INDEX:
      case BUILT_IN_RINDEX:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
      case BUILT_IN_STRLEN:
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_STRNCMP:
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_STRSPN:
      case BUILT_IN_STRCSPN:
      case BUILT_IN_STRCMP:
      case BUILT_IN_PUTCHAR:
      case BUILT_IN_PRINTF:
      case BUILT_IN_FPUTC:
      case BUILT_IN_FPUTS:
      case BUILT_IN_FWRITE:
      case BUILT_IN_PUTCHAR_UNLOCKED:
      case BUILT_IN_PUTS_UNLOCKED:
      case BUILT_IN_PRINTF_UNLOCKED:
      case BUILT_IN_FPUTC_UNLOCKED:
      case BUILT_IN_FPUTS_UNLOCKED:
      case BUILT_IN_FWRITE_UNLOCKED:
      case BUILT_IN_FLOOR:
      case BUILT_IN_FLOORF:
      case BUILT_IN_FLOORL:
      case BUILT_IN_CEILF:
      case BUILT_IN_CEILL:
      case BUILT_IN_TRUNC:
      case BUILT_IN_TRUNCF:
      case BUILT_IN_TRUNCL:
      case BUILT_IN_ROUND:
      case BUILT_IN_ROUNDF:
      case BUILT_IN_ROUNDL:
      case BUILT_IN_NEARBYINT:
      case BUILT_IN_NEARBYINTF:
      case BUILT_IN_NEARBYINTL:
        return expand_call (exp, target, ignore);

      default:
        break;
      }
  /* The built-in function expanders test for target == const0_rtx
     to determine whether the function's result will be ignored.  */
  if (ignore)
    target = const0_rtx;

  /* If the result of a pure or const built-in function is ignored, and
     none of its arguments are volatile, we can avoid expanding the
     built-in call and just evaluate the arguments for side-effects.  */
  if (target == const0_rtx
      && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
    {
      bool volatilep = false;
      tree arg;

      for (arg = arglist; arg; arg = TREE_CHAIN (arg))
        if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
          {
            volatilep = true;
            break;
          }

      if (! volatilep)
        {
          for (arg = arglist; arg; arg = TREE_CHAIN (arg))
            expand_expr (TREE_VALUE (arg), const0_rtx,
                         VOIDmode, EXPAND_NORMAL);
          return const0_rtx;
        }
    }

  switch (fcode)
    {
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      /* build_function_call changes these into ABS_EXPR.  */
      abort ();

    case BUILT_IN_FABSF:
    case BUILT_IN_FABSL:
      target = expand_builtin_fabs (arglist, target, subtarget);
      if (target)
        return target;
      break;

    case BUILT_IN_CABSF:
    case BUILT_IN_CABSL:
      if (flag_unsafe_math_optimizations)
        {
          target = expand_builtin_cabs (arglist, target);
          if (target)
            return target;
        }
      break;

    case BUILT_IN_CONJF:
    case BUILT_IN_CONJL:
    case BUILT_IN_CREAL:
    case BUILT_IN_CREALF:
    case BUILT_IN_CREALL:
    case BUILT_IN_CIMAG:
    case BUILT_IN_CIMAGF:
    case BUILT_IN_CIMAGL:
      /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
         and IMAGPART_EXPR.  */
      abort ();

    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
      /* Treat these like sqrt only if unsafe math optimizations are allowed,
         because of possible accuracy problems.  */
      if (! flag_unsafe_math_optimizations)
        break;
    case BUILT_IN_SQRTF:
    case BUILT_IN_SQRTL:
    case BUILT_IN_FLOOR:
    case BUILT_IN_FLOORF:
    case BUILT_IN_FLOORL:
    case BUILT_IN_CEILF:
    case BUILT_IN_CEILL:
    case BUILT_IN_TRUNC:
    case BUILT_IN_TRUNCF:
    case BUILT_IN_TRUNCL:
    case BUILT_IN_ROUND:
    case BUILT_IN_ROUNDF:
    case BUILT_IN_ROUNDL:
    case BUILT_IN_NEARBYINT:
    case BUILT_IN_NEARBYINTF:
    case BUILT_IN_NEARBYINTL:
      target = expand_builtin_mathfn (exp, target, subtarget);
      if (target)
        return target;
      break;

    case BUILT_IN_ATAN2:
    case BUILT_IN_ATAN2F:
    case BUILT_IN_ATAN2L:
      if (! flag_unsafe_math_optimizations)
        break;
      target = expand_builtin_mathfn_2 (exp, target, subtarget);
      if (target)
        return target;
      break;

    case BUILT_IN_APPLY_ARGS:
      return expand_builtin_apply_args ();
      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
         FUNCTION with a copy of the parameters described by
         ARGUMENTS, and ARGSIZE.  It returns a block of memory
         allocated on the stack into which is stored all the registers
         that might possibly be used for returning the result of a
         function.  ARGUMENTS is the value returned by
         __builtin_apply_args.  ARGSIZE is the number of bytes of
         arguments that must be copied.  ??? How should this value be
         computed?  We'll also need a safe worst case value for varargs
         functions.  */
    case BUILT_IN_APPLY:
      if (!validate_arglist (arglist, POINTER_TYPE,
                             POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
          && !validate_arglist (arglist, REFERENCE_TYPE,
                                POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        return const0_rtx;
      else
        {
          int i;
          tree t;
          rtx ops[3];

          for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
            ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);

          return expand_builtin_apply (ops[0], ops[1], ops[2]);
        }

      /* __builtin_return (RESULT) causes the function to return the
         value described by RESULT.  RESULT is address of the block of
         memory returned by __builtin_apply.  */
    case BUILT_IN_RETURN:
      if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
        expand_builtin_return (expand_expr (TREE_VALUE (arglist),
                                            NULL_RTX, VOIDmode, 0));
      return const0_rtx;

    case BUILT_IN_SAVEREGS:
      return expand_builtin_saveregs ();

    case BUILT_IN_ARGS_INFO:
      return expand_builtin_args_info (arglist);

      /* Return the address of the first anonymous stack arg.  */
    case BUILT_IN_NEXT_ARG:
      return expand_builtin_next_arg (arglist);

    case BUILT_IN_CLASSIFY_TYPE:
      return expand_builtin_classify_type (arglist);

    case BUILT_IN_CONSTANT_P:
      return expand_builtin_constant_p (arglist, target_mode);

    case BUILT_IN_FRAME_ADDRESS:
    case BUILT_IN_RETURN_ADDRESS:
      return expand_builtin_frame_address (fndecl, arglist);

      /* Returns the address of the area where the structure is returned.
         0 otherwise.  */
    case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      if (arglist != 0
          || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
          || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
        return const0_rtx;
      else
        return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);

    case BUILT_IN_ALLOCA:
      target = expand_builtin_alloca (arglist, target);
      if (target)
        return target;
      break;
    case BUILT_IN_FFSLL:
      target = expand_builtin_unop (target_mode, arglist, target,
                                    subtarget, ffs_optab);
      if (target)
        return target;
      break;

    case BUILT_IN_CLZLL:
      target = expand_builtin_unop (target_mode, arglist, target,
                                    subtarget, clz_optab);
      if (target)
        return target;
      break;

    case BUILT_IN_CTZLL:
      target = expand_builtin_unop (target_mode, arglist, target,
                                    subtarget, ctz_optab);
      if (target)
        return target;
      break;

    case BUILT_IN_POPCOUNT:
    case BUILT_IN_POPCOUNTL:
    case BUILT_IN_POPCOUNTLL:
      target = expand_builtin_unop (target_mode, arglist, target,
                                    subtarget, popcount_optab);
      if (target)
        return target;
      break;

    case BUILT_IN_PARITY:
    case BUILT_IN_PARITYL:
    case BUILT_IN_PARITYLL:
      target = expand_builtin_unop (target_mode, arglist, target,
                                    subtarget, parity_optab);
      if (target)
        return target;
      break;
    case BUILT_IN_STRLEN:
      target = expand_builtin_strlen (arglist, target, target_mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCPY:
      target = expand_builtin_strcpy (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCPY:
      target = expand_builtin_strncpy (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STPCPY:
      target = expand_builtin_stpcpy (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCAT:
      target = expand_builtin_strcat (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCAT:
      target = expand_builtin_strncat (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRSPN:
      target = expand_builtin_strspn (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCSPN:
      target = expand_builtin_strcspn (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRSTR:
      target = expand_builtin_strstr (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRPBRK:
      target = expand_builtin_strpbrk (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      target = expand_builtin_strchr (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      target = expand_builtin_strrchr (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCPY:
      target = expand_builtin_memcpy (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMPCPY:
      target = expand_builtin_mempcpy (arglist, target, mode, /*endp=*/ 1);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMMOVE:
      target = expand_builtin_memmove (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BCOPY:
      target = expand_builtin_bcopy (arglist);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMSET:
      target = expand_builtin_memset (arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_BZERO:
      target = expand_builtin_bzero (arglist);
      if (target)
        return target;
      break;

    case BUILT_IN_STRCMP:
      target = expand_builtin_strcmp (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_STRNCMP:
      target = expand_builtin_strncmp (exp, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_MEMCMP:
      target = expand_builtin_memcmp (exp, arglist, target, mode);
      if (target)
        return target;
      break;

    case BUILT_IN_SETJMP:
      target = expand_builtin_setjmp (arglist, target);
      if (target)
        return target;
      break;
      /* __builtin_longjmp is passed a pointer to an array of five words.
         It's similar to the C library longjmp function but works with
         __builtin_setjmp above.  */
    case BUILT_IN_LONGJMP:
      if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
        break;
      else
        {
          rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
                                      VOIDmode, 0);
          rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
                                   NULL_RTX, VOIDmode, 0);

          if (value != const1_rtx)
            {
              error ("__builtin_longjmp second argument must be 1");
              return const0_rtx;
            }

          expand_builtin_longjmp (buf_addr, value);
          return const0_rtx;
        }

    case BUILT_IN_TRAP:
      expand_builtin_trap ();
      return const0_rtx;

    case BUILT_IN_FPUTS:
      target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 0);
      if (target)
        return target;
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      target = expand_builtin_fputs (arglist, ignore,/*unlocked=*/ 1);
      if (target)
        return target;
      break;
      /* Various hooks for the DWARF 2 __throw routine.  */
    case BUILT_IN_UNWIND_INIT:
      expand_builtin_unwind_init ();
      return const0_rtx;
    case BUILT_IN_DWARF_CFA:
      return virtual_cfa_rtx;
#ifdef DWARF2_UNWIND_INFO
    case BUILT_IN_DWARF_SP_COLUMN:
      return expand_builtin_dwarf_sp_column ();
    case BUILT_IN_INIT_DWARF_REG_SIZES:
      expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
      return const0_rtx;
#endif
    case BUILT_IN_FROB_RETURN_ADDR:
      return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EXTRACT_RETURN_ADDR:
      return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
    case BUILT_IN_EH_RETURN:
      expand_builtin_eh_return (TREE_VALUE (arglist),
                                TREE_VALUE (TREE_CHAIN (arglist)));
      return const0_rtx;
#ifdef EH_RETURN_DATA_REGNO
    case BUILT_IN_EH_RETURN_DATA_REGNO:
      return expand_builtin_eh_return_data_regno (arglist);
#endif
    case BUILT_IN_VA_START:
    case BUILT_IN_STDARG_START:
      return expand_builtin_va_start (arglist);
    case BUILT_IN_VA_END:
      return expand_builtin_va_end (arglist);
    case BUILT_IN_VA_COPY:
      return expand_builtin_va_copy (arglist);
    case BUILT_IN_EXPECT:
      return expand_builtin_expect (arglist, target);
    case BUILT_IN_PREFETCH:
      expand_builtin_prefetch (arglist);
      return const0_rtx;

    default:    /* just do library call, if unknown builtin */
      if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
        error ("built-in function `%s' not currently supported",
               IDENTIFIER_POINTER (DECL_NAME (fndecl)));
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
/* Determine whether a tree node represents a call to a built-in
   math function.  If the tree T is a call to a built-in function
   taking a single real argument, then the return value is the
   DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.  Otherwise
   the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (tree t)
{
  tree fndecl, arglist;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (TREE_OPERAND (t, 0)) != ADDR_EXPR)
    return END_BUILTINS;

  fndecl = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
  if (TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  arglist = TREE_OPERAND (t, 1);
  if (! arglist
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
    return END_BUILTINS;

  arglist = TREE_CHAIN (arglist);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATAN2:
    case BUILT_IN_ATAN2F:
    case BUILT_IN_ATAN2L:
      if (! arglist
          || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE
          || TREE_CHAIN (arglist))
        return END_BUILTINS;
      break;

    default:
      if (arglist)
        return END_BUILTINS;
      break;
    }

  return DECL_FUNCTION_CODE (fndecl);
}
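/* For example, for the expression `sqrtf (x)' with X of REAL_TYPE this
   returns BUILT_IN_SQRTF, while for a call that is not a built-in,
   takes no real first argument, or belongs to the BUILT_IN_MD class it
   returns END_BUILTINS.  */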
/* Fold a call to __builtin_constant_p, if we know it will evaluate to a
   constant.  ARGLIST is the argument list of the call.  */

static tree
fold_builtin_constant_p (tree arglist)
{
  if (arglist == 0)
    return 0;

  arglist = TREE_VALUE (arglist);

  /* We return 1 for a numeric type that's known to be a constant
     value at compile-time or for an aggregate type that's a
     literal constant.  */
  STRIP_NOPS (arglist);

  /* If we know this is a constant, emit the constant of one.  */
  if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
      || (TREE_CODE (arglist) == CONSTRUCTOR
          && TREE_CONSTANT (arglist))
      || (TREE_CODE (arglist) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
    return integer_one_node;

  /* If we aren't going to be running CSE or this expression
     has side effects, show we don't know it to be a constant.
     Likewise if it's a pointer or aggregate type since in those
     case we only want literals, since those are only optimized
     when generating RTL, not later.
     And finally, if we are compiling an initializer, not code, we
     need to return a definite result now; there's not going to be any
     more optimization done.  */
  if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
      || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
      || POINTER_TYPE_P (TREE_TYPE (arglist))
      || (*lang_hooks.decls.global_bindings_p) ())
    return integer_zero_node;

  return 0;
}
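/* For illustration:

     __builtin_constant_p (42)      folds to 1 here;
     __builtin_constant_p ("abc")   folds to 1 (address of a string literal);
     __builtin_constant_p (x)       for an ordinary variable folds to 0 only
                                    when CSE can no longer prove it constant;
                                    otherwise it is left for the RTL
                                    optimizers to resolve.  */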
/* Fold a call to __builtin_classify_type.  */

static tree
fold_builtin_classify_type (tree arglist)
{
  if (arglist == 0)
    return build_int_2 (no_type_class, 0);

  return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
}
/* Fold a call to __builtin_inf or __builtin_huge_val.  */

static tree
fold_builtin_inf (tree type, int warn)
{
  REAL_VALUE_TYPE real;

  if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
    warning ("target format does not support infinity");

  real_inf (&real);
  return build_real (type, real);
}
/* Fold a call to __builtin_nan or __builtin_nans.  */

static tree
fold_builtin_nan (tree arglist, tree type, int quiet)
{
  REAL_VALUE_TYPE real;
  const char *str;

  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
    return 0;
  str = c_getstr (TREE_VALUE (arglist));
  if (!str)
    return 0;

  if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
    return 0;

  return build_real (type, real);
}
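/* For illustration, __builtin_nan ("") folds to a quiet NaN constant of
   the requested type, and __builtin_nans ("") to a signalling NaN,
   provided real_nan can parse the string for the target format.  */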
/* EXP is assumed to be a builtin call where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f)).
   Do the transformation.  */

static tree
fold_trunc_transparent_mathfn (tree exp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (optimize && validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
    {
      tree arg0 = strip_float_extensions (TREE_VALUE (arglist));
      tree ftype = TREE_TYPE (exp);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
          && (decl = mathfn_built_in (newtype, fcode)))
        {
          arglist =
            build_tree_list (NULL_TREE, fold (convert (newtype, arg0)));
          return convert (ftype,
                          build_function_call_expr (decl, arglist));
        }
    }
  return 0;
}
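/* For illustration, with optimization enabled and F of type float,

     floor ((double) F)

   is rewritten as

     (double) floorf (F)

   since doing the rounding in the narrower type yields the same value
   that would then be widened back.  */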
/* Fold function call to builtin cabs, cabsf or cabsl.  FNDECL is the
   function's DECL, ARGLIST is the argument list and TYPE is the return
   type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cabs (tree fndecl, tree arglist, tree type)
{
  tree arg;

  if (!arglist || TREE_CHAIN (arglist))
    return NULL_TREE;

  arg = TREE_VALUE (arglist);
  if (TREE_CODE (TREE_TYPE (arg)) != COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Evaluate cabs of a constant at compile-time.  */
  if (flag_unsafe_math_optimizations
      && TREE_CODE (arg) == COMPLEX_CST
      && TREE_CODE (TREE_REALPART (arg)) == REAL_CST
      && TREE_CODE (TREE_IMAGPART (arg)) == REAL_CST
      && ! TREE_CONSTANT_OVERFLOW (TREE_REALPART (arg))
      && ! TREE_CONSTANT_OVERFLOW (TREE_IMAGPART (arg)))
    {
      REAL_VALUE_TYPE r, i;

      r = TREE_REAL_CST (TREE_REALPART (arg));
      i = TREE_REAL_CST (TREE_IMAGPART (arg));

      real_arithmetic (&r, MULT_EXPR, &r, &r);
      real_arithmetic (&i, MULT_EXPR, &i, &i);
      real_arithmetic (&r, PLUS_EXPR, &r, &i);
      if (real_sqrt (&r, TYPE_MODE (type), &r)
          || ! flag_trapping_math)
        return build_real (type, r);
    }

  /* If either part is zero, cabs is fabs of the other.  */
  if (TREE_CODE (arg) == COMPLEX_EXPR
      && real_zerop (TREE_OPERAND (arg, 0)))
    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 1)));
  if (TREE_CODE (arg) == COMPLEX_EXPR
      && real_zerop (TREE_OPERAND (arg, 1)))
    return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg, 0)));

  if (flag_unsafe_math_optimizations)
    {
      enum built_in_function fcode;
      tree sqrtfn;

      fcode = DECL_FUNCTION_CODE (fndecl);
      if (fcode == BUILT_IN_CABS)
        sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
      else if (fcode == BUILT_IN_CABSF)
        sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
      else if (fcode == BUILT_IN_CABSL)
        sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
      else
        sqrtfn = NULL_TREE;

      if (sqrtfn != NULL_TREE)
        {
          tree rpart, ipart, result, arglist;

          rpart = fold (build1 (REALPART_EXPR, type, arg));
          ipart = fold (build1 (IMAGPART_EXPR, type, arg));

          rpart = save_expr (rpart);
          ipart = save_expr (ipart);

          result = fold (build (PLUS_EXPR, type,
                                fold (build (MULT_EXPR, type,
                                             rpart, rpart)),
                                fold (build (MULT_EXPR, type,
                                             ipart, ipart))));

          arglist = build_tree_list (NULL_TREE, result);
          return build_function_call_expr (sqrtfn, arglist);
        }
    }

  return NULL_TREE;
}
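/* For illustration:

     cabs of a COMPLEX_EXPR whose imaginary part is 0.0 folds to an
     ABS_EXPR of the real part;
     cabs (3.0 + 4.0i) folds to the constant 5.0 under
     -funsafe-math-optimizations;
     otherwise, with unsafe math enabled, cabs (z) is rewritten as
     sqrt (creal(z)*creal(z) + cimag(z)*cimag(z)).  */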
/* Used by constant folding to eliminate some builtin calls early.  EXP is
   the CALL_EXPR of a call to a builtin function.  */

tree
fold_builtin (tree exp)
{
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return 0;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_CONSTANT_P:
      return fold_builtin_constant_p (arglist);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arglist);

    case BUILT_IN_STRLEN:
      if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
        {
          tree len = c_strlen (TREE_VALUE (arglist));
          if (len)
            {
              /* Convert from the internal "sizetype" type to "size_t".  */
              len = convert (size_type_node, len);
              return len;
            }
        }
      break;
    case BUILT_IN_FABSF:
    case BUILT_IN_FABSL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
        return fold (build1 (ABS_EXPR, type, TREE_VALUE (arglist)));
      break;

    case BUILT_IN_CABSF:
    case BUILT_IN_CABSL:
      return fold_builtin_cabs (fndecl, arglist, type);
    case BUILT_IN_SQRTF:
    case BUILT_IN_SQRTL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
        {
          enum built_in_function fcode;
          tree arg = TREE_VALUE (arglist);

          /* Optimize sqrt of constant value.  */
          if (TREE_CODE (arg) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (arg))
            {
              REAL_VALUE_TYPE r, x;

              x = TREE_REAL_CST (arg);
              if (real_sqrt (&r, TYPE_MODE (type), &x)
                  || (!flag_trapping_math && !flag_errno_math))
                return build_real (type, r);
            }

          /* Optimize sqrt(exp(x)) = exp(x*0.5).  */
          fcode = builtin_mathfn_code (arg);
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_EXP
                  || fcode == BUILT_IN_EXPF
                  || fcode == BUILT_IN_EXPL))
            {
              tree expfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
              arg = fold (build (MULT_EXPR, type,
                                 TREE_VALUE (TREE_OPERAND (arg, 1)),
                                 build_real (type, dconsthalf)));
              arglist = build_tree_list (NULL_TREE, arg);
              return build_function_call_expr (expfn, arglist);
            }

          /* Optimize sqrt(pow(x,y)) = pow(x,y*0.5).  */
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_POW
                  || fcode == BUILT_IN_POWF
                  || fcode == BUILT_IN_POWL))
            {
              tree powfn = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
              tree arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
              tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
              tree narg1 = fold (build (MULT_EXPR, type, arg1,
                                        build_real (type, dconsthalf)));
              arglist = tree_cons (NULL_TREE, arg0,
                                   build_tree_list (NULL_TREE, narg1));
              return build_function_call_expr (powfn, arglist);
            }
        }
      break;
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
        {
          tree arg = TREE_VALUE (arglist);

          /* Optimize sin(0.0) = 0.0.  */
          if (real_zerop (arg))
            return arg;
        }
      break;

    case BUILT_IN_COS:
    case BUILT_IN_COSF:
    case BUILT_IN_COSL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
        {
          tree arg = TREE_VALUE (arglist);

          /* Optimize cos(0.0) = 1.0.  */
          if (real_zerop (arg))
            return build_real (type, dconst1);

          /* Optimize cos(-x) into cos(x).  */
          if (TREE_CODE (arg) == NEGATE_EXPR)
            {
              tree arglist = build_tree_list (NULL_TREE,
                                              TREE_OPERAND (arg, 0));
              return build_function_call_expr (fndecl, arglist);
            }
        }
      break;
    case BUILT_IN_EXP:
    case BUILT_IN_EXPF:
    case BUILT_IN_EXPL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
        {
          enum built_in_function fcode;
          tree arg = TREE_VALUE (arglist);

          /* Optimize exp(0.0) = 1.0.  */
          if (real_zerop (arg))
            return build_real (type, dconst1);

          /* Optimize exp(1.0) = e.  */
          if (real_onep (arg))
            {
              REAL_VALUE_TYPE cst;

              if (! builtin_dconsts_init)
                init_builtin_dconsts ();
              real_convert (&cst, TYPE_MODE (type), &dconste);
              return build_real (type, cst);
            }

          /* Attempt to evaluate exp at compile-time.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (arg))
            {
              REAL_VALUE_TYPE cint;
              REAL_VALUE_TYPE c;
              HOST_WIDE_INT n;

              c = TREE_REAL_CST (arg);
              n = real_to_integer (&c);
              real_from_integer (&cint, VOIDmode, n,
                                 n < 0 ? -1 : 0, 0);
              if (real_identical (&c, &cint))
                {
                  REAL_VALUE_TYPE x;

                  if (! builtin_dconsts_init)
                    init_builtin_dconsts ();
                  real_powi (&x, TYPE_MODE (type), &dconste, n);
                  return build_real (type, x);
                }
            }

          /* Optimize exp(log(x)) = x.  */
          fcode = builtin_mathfn_code (arg);
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_LOG
                  || fcode == BUILT_IN_LOGF
                  || fcode == BUILT_IN_LOGL))
            return TREE_VALUE (TREE_OPERAND (arg, 1));
        }
      break;
    case BUILT_IN_LOG:
    case BUILT_IN_LOGF:
    case BUILT_IN_LOGL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
        {
          enum built_in_function fcode;
          tree arg = TREE_VALUE (arglist);

          /* Optimize log(1.0) = 0.0.  */
          if (real_onep (arg))
            return build_real (type, dconst0);

          /* Optimize log(exp(x)) = x.  */
          fcode = builtin_mathfn_code (arg);
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_EXP
                  || fcode == BUILT_IN_EXPF
                  || fcode == BUILT_IN_EXPL))
            return TREE_VALUE (TREE_OPERAND (arg, 1));

          /* Optimize log(sqrt(x)) = log(x)*0.5.  */
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_SQRT
                  || fcode == BUILT_IN_SQRTF
                  || fcode == BUILT_IN_SQRTL))
            {
              tree logfn = build_function_call_expr (fndecl,
                                                     TREE_OPERAND (arg, 1));
              return fold (build (MULT_EXPR, type, logfn,
                                  build_real (type, dconsthalf)));
            }

          /* Optimize log(pow(x,y)) = y*log(x).  */
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_POW
                  || fcode == BUILT_IN_POWF
                  || fcode == BUILT_IN_POWL))
            {
              tree arg0, arg1, logfn;

              arg0 = TREE_VALUE (TREE_OPERAND (arg, 1));
              arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg, 1)));
              arglist = build_tree_list (NULL_TREE, arg0);
              logfn = build_function_call_expr (fndecl, arglist);
              return fold (build (MULT_EXPR, type, arg1, logfn));
            }
        }
      break;
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
        {
          enum built_in_function fcode;
          tree arg = TREE_VALUE (arglist);

          /* Optimize tan(0.0) = 0.0.  */
          if (real_zerop (arg))
            return arg;

          /* Optimize tan(atan(x)) = x.  */
          fcode = builtin_mathfn_code (arg);
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_ATAN
                  || fcode == BUILT_IN_ATANF
                  || fcode == BUILT_IN_ATANL))
            return TREE_VALUE (TREE_OPERAND (arg, 1));
        }
      break;
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
      if (validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
        {
          tree arg = TREE_VALUE (arglist);

          /* Optimize atan(0.0) = 0.0.  */
          if (real_zerop (arg))
            return arg;

          /* Optimize atan(1.0) = pi/4.  */
          if (real_onep (arg))
            {
              REAL_VALUE_TYPE cst;

              if (! builtin_dconsts_init)
                init_builtin_dconsts ();
              real_convert (&cst, TYPE_MODE (type), &dconstpi);
              cst.exp -= 2;
              return build_real (type, cst);
            }
        }
      break;
    case BUILT_IN_POW:
    case BUILT_IN_POWF:
    case BUILT_IN_POWL:
      if (validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
        {
          enum built_in_function fcode;
          tree arg0 = TREE_VALUE (arglist);
          tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));

          /* Optimize pow(1.0,y) = 1.0.  */
          if (real_onep (arg0))
            return omit_one_operand (type, build_real (type, dconst1), arg1);

          if (TREE_CODE (arg1) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (arg1))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (arg1);

              /* Optimize pow(x,0.0) = 1.0.  */
              if (REAL_VALUES_EQUAL (c, dconst0))
                return omit_one_operand (type, build_real (type, dconst1),
                                         arg0);

              /* Optimize pow(x,1.0) = x.  */
              if (REAL_VALUES_EQUAL (c, dconst1))
                return arg0;

              /* Optimize pow(x,-1.0) = 1.0/x.  */
              if (REAL_VALUES_EQUAL (c, dconstm1))
                return fold (build (RDIV_EXPR, type,
                                    build_real (type, dconst1),
                                    arg0));

              /* Optimize pow(x,2.0) = x*x.  */
              if (REAL_VALUES_EQUAL (c, dconst2)
                  && (*lang_hooks.decls.global_bindings_p) () == 0
                  && ! CONTAINS_PLACEHOLDER_P (arg0))
                {
                  arg0 = save_expr (arg0);
                  return fold (build (MULT_EXPR, type, arg0, arg0));
                }

              /* Optimize pow(x,-2.0) = 1.0/(x*x).  */
              if (flag_unsafe_math_optimizations
                  && REAL_VALUES_EQUAL (c, dconstm2)
                  && (*lang_hooks.decls.global_bindings_p) () == 0
                  && ! CONTAINS_PLACEHOLDER_P (arg0))
                {
                  arg0 = save_expr (arg0);
                  return fold (build (RDIV_EXPR, type,
                                      build_real (type, dconst1),
                                      fold (build (MULT_EXPR, type,
                                                   arg0, arg0))));
                }

              /* Optimize pow(x,0.5) = sqrt(x).  */
              if (flag_unsafe_math_optimizations
                  && REAL_VALUES_EQUAL (c, dconsthalf))
                {
                  tree sqrtfn;

                  fcode = DECL_FUNCTION_CODE (fndecl);
                  if (fcode == BUILT_IN_POW)
                    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRT];
                  else if (fcode == BUILT_IN_POWF)
                    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTF];
                  else if (fcode == BUILT_IN_POWL)
                    sqrtfn = implicit_built_in_decls[BUILT_IN_SQRTL];
                  else
                    sqrtfn = NULL_TREE;

                  if (sqrtfn != NULL_TREE)
                    {
                      tree arglist = build_tree_list (NULL_TREE, arg0);
                      return build_function_call_expr (sqrtfn, arglist);
                    }
                }

              /* Attempt to evaluate pow at compile-time.  */
              if (TREE_CODE (arg0) == REAL_CST
                  && ! TREE_CONSTANT_OVERFLOW (arg0))
                {
                  REAL_VALUE_TYPE cint;
                  HOST_WIDE_INT n;

                  n = real_to_integer (&c);
                  real_from_integer (&cint, VOIDmode, n,
                                     n < 0 ? -1 : 0, 0);
                  if (real_identical (&c, &cint))
                    {
                      REAL_VALUE_TYPE x;
                      bool inexact;

                      x = TREE_REAL_CST (arg0);
                      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
                      if (flag_unsafe_math_optimizations || !inexact)
                        return build_real (type, x);
                    }
                }
            }

          /* Optimize pow(exp(x),y) = exp(x*y).  */
          fcode = builtin_mathfn_code (arg0);
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_EXP
                  || fcode == BUILT_IN_EXPF
                  || fcode == BUILT_IN_EXPL))
            {
              tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
              tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
              arg = fold (build (MULT_EXPR, type, arg, arg1));
              arglist = build_tree_list (NULL_TREE, arg);
              return build_function_call_expr (expfn, arglist);
            }

          /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_SQRT
                  || fcode == BUILT_IN_SQRTF
                  || fcode == BUILT_IN_SQRTL))
            {
              tree narg0 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree narg1 = fold (build (MULT_EXPR, type, arg1,
                                        build_real (type, dconsthalf)));

              arglist = tree_cons (NULL_TREE, narg0,
                                   build_tree_list (NULL_TREE, narg1));
              return build_function_call_expr (fndecl, arglist);
            }

          /* Optimize pow(pow(x,y),z) = pow(x,y*z).  */
          if (flag_unsafe_math_optimizations
              && (fcode == BUILT_IN_POW
                  || fcode == BUILT_IN_POWF
                  || fcode == BUILT_IN_POWL))
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
              tree narg1 = fold (build (MULT_EXPR, type, arg01, arg1));
              arglist = tree_cons (NULL_TREE, arg00,
                                   build_tree_list (NULL_TREE, narg1));
              return build_function_call_expr (fndecl, arglist);
            }
        }
      break;
    case BUILT_IN_INF:
    case BUILT_IN_INFF:
    case BUILT_IN_INFL:
      return fold_builtin_inf (type, true);

    case BUILT_IN_HUGE_VAL:
    case BUILT_IN_HUGE_VALF:
    case BUILT_IN_HUGE_VALL:
      return fold_builtin_inf (type, false);

    case BUILT_IN_NAN:
    case BUILT_IN_NANF:
    case BUILT_IN_NANL:
      return fold_builtin_nan (arglist, type, true);

    case BUILT_IN_NANSF:
    case BUILT_IN_NANSL:
      return fold_builtin_nan (arglist, type, false);

    case BUILT_IN_FLOOR:
    case BUILT_IN_FLOORF:
    case BUILT_IN_FLOORL:
    case BUILT_IN_CEILF:
    case BUILT_IN_CEILL:
    case BUILT_IN_TRUNC:
    case BUILT_IN_TRUNCF:
    case BUILT_IN_TRUNCL:
    case BUILT_IN_ROUND:
    case BUILT_IN_ROUNDF:
    case BUILT_IN_ROUNDL:
    case BUILT_IN_NEARBYINT:
    case BUILT_IN_NEARBYINTF:
    case BUILT_IN_NEARBYINTL:
      return fold_trunc_transparent_mathfn (exp);

    default:
      break;
    }

  return 0;
}
/* Conveniently construct a function call expression.  */

tree
build_function_call_expr (tree fn, tree arglist)
{
  tree call_expr;

  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arglist);
  TREE_SIDE_EFFECTS (call_expr) = 1;
  return fold (call_expr);
}
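/* For illustration, to build a call to a known decl FN taking a single
   argument ARG:

     tree args = build_tree_list (NULL_TREE, arg);
     tree call = build_function_call_expr (fn, args);

   CALL can then be folded or expanded like any other CALL_EXPR.  */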
/* This function validates the types of a function call argument list
   represented as a tree chain of parameters against a specified list
   of tree_codes.  If the last specifier is a 0, that represents an
   ellipses, otherwise the last specifier must be a VOID_TYPE.  */

static int
validate_arglist (tree arglist, ...)
{
  enum tree_code code;
  int res = 0;
  va_list ap;

  va_start (ap, arglist);

  do
    {
      code = va_arg (ap, enum tree_code);
      switch (code)
        {
        case 0:
          /* This signifies an ellipses, any further arguments are all ok.  */
          res = 1;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = arglist == 0;
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          if (arglist == 0
              || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
            goto end;
          break;
        }
      arglist = TREE_CHAIN (arglist);
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);
  return res;
}
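/* For illustration, the fputs expander above checks its two pointer
   arguments with

     validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   which accepts exactly two pointer-typed arguments, while a trailing 0
   instead of VOID_TYPE would allow any further arguments.  */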
/* Default version of target-specific builtin setup that does nothing.  */

void
default_init_builtins (void)
{
}

/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        enum machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
/* Instantiate all remaining CONSTANT_P_RTX nodes.  */

void
purge_builtin_constant_p (void)
{
  rtx insn, set, arg, new, note;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (set = single_set (insn)) != NULL_RTX
        && (GET_CODE (arg = SET_SRC (set)) == CONSTANT_P_RTX
            || (GET_CODE (arg) == SUBREG
                && (GET_CODE (arg = SUBREG_REG (arg))
                    == CONSTANT_P_RTX))))
      {
        arg = XEXP (arg, 0);
        new = CONSTANT_P (arg) ? const1_rtx : const0_rtx;
        validate_change (insn, &SET_SRC (set), new, 0);

        /* Remove the REG_EQUAL note from the insn.  */
        if ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
          remove_note (insn, note);
      }
}
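/* For illustration (RTL shown schematically): an insn such as

     (set (reg:SI 60) (constant_p_rtx (reg:SI 59)))

   is changed to set const_int 0, because a register is not a constant;
   had the wrapped operand satisfied CONSTANT_P, const_int 1 would have
   been substituted instead, and any stale REG_EQUAL note is dropped.  */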
/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

static bool
readonly_data_expr (tree exp)
{
  if (TREE_CODE (exp) == ADDR_EXPR)
    return decl_readonly_section (TREE_OPERAND (exp, 0), 0);