/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "coretypes.h"
#include "langhooks.h"

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
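/* Illustrative example (hypothetical target values, not from this file):
   with PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT == 8,
   STACK_BYTES evaluates to 128 / 8 = 16, so sizes rounded to STACK_BYTES
   land on 16-byte stack boundaries.  */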
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
static int calls_function (tree, int);
static int calls_function_1 (tree, int);

static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int, tree,
					     tree, CUMULATIVE_ARGS *, int,
					     rtx *, int *, int *, int *,
					     bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      int);
static tree fix_unsafe_tree (tree);
static bool shift_returned_value (tree, rtx *);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (tree exp, int which)
{
  int val;

  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}
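/* Illustrative sketch of a caller's use of calls_function; the real
   consumer in this file is precompute_arguments below, and the second
   argument shown here is hypothetical:

     if (calls_function (args[i].tree_value, 0))
       ... evaluating this argument may emit a nested call, so
	   precompute it before laying out this call's stack ...

   Passing WHICH == 1 instead asks only about alloca-like callees.  */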
/* Recursive function to do the work of above function.  */

static int
calls_function_1 (tree exp, int which)
{
  int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== FUNCTION_TYPE)
	       && (TYPE_RETURNS_STACK_DEPRESSED
		   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL)
	       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				       0)
		   & ECF_MAY_BE_ALLOCA))
	return 1;
      break;

    case CONSTRUCTOR:
      {
	tree tem;

	for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
	  if (calls_function_1 (TREE_VALUE (tem), which))
	    return 1;
      }
      break;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	tree local;
	tree subblock;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      break;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
	if (calls_function_1 (TREE_VALUE (exp), which))
	  return 1;
      break;

    default:
      break;
    }

  /* Only expressions and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'b')
    return 0;

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
		      int reg_parm_seen, int sibcallp)
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg,
				     gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     n_pop);
      else
	pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			       rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg,
					   gen_rtx_MEM (FUNCTION_MODE, funexp),
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					rounded_stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
	(VOIDmode,
	 gen_rtx_USE (VOIDmode,
		      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
	 call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
					       REG_NOTES (call_insn));
  else
    note_eh_region_may_contain_throw ();

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
					       REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
						 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
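/* Worked example of the popping bookkeeping above, with hypothetical
   numbers: suppose RETURN_POPS_ARGS says the callee pops n_popped == 8
   of rounded_stack_size == 24 bytes.  After the call is emitted,
   rounded_stack_size becomes 24 - 8 = 16 and stack_pointer_delta drops
   by 8; only the remaining 16 bytes are popped here, or deferred via
   pending_stack_adjust when flag_defer_pop allows it.  */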
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set LONGJMP if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (tree fndecl, int flags)
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_LONGJMP;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
		&& ! strcmp (tname, "fork"))
	       /* Linux specific: __clone.  check NAME to insist on the
		  leading underscores, to avoid polluting the ISO / POSIX
		  namespace.  */
	       || (name[0] == '_' && name[1] == '_'
		   && ! strcmp (tname, "clone"))
	       || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
		   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
		   && (tname[5] == '\0'
		       || ((tname[5] == 'p' || tname[5] == 'e')
			   && tname[6] == '\0'))))
	flags |= ECF_FORK_OR_EXEC;
    }
  return flags;
}
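/* Illustrative results of the name matching above, for hypothetical
   file-scope, TREE_PUBLIC declarations:

     alloca, __builtin_alloca    -> ECF_MAY_BE_ALLOCA
     setjmp, sigsetjmp, vfork    -> ECF_RETURNS_TWICE
     longjmp, siglongjmp         -> ECF_LONGJMP
     fork, __clone, execl, execv -> ECF_FORK_OR_EXEC

   A static or block-scope function with one of these names matches
   nothing, because of the DECL_CONTEXT / TREE_PUBLIC guards.  */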
/* Return nonzero when FNDECL represents a function that can return
   more than once (setjmp and friends).  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
			      0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
      type = TREE_TYPE (exp);

      if (i)
	{
	  if (i->pure_function)
	    flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
	  if (i->const_function)
	    flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
	}

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
	flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
	flags |= ECF_LIBCALL_BLOCK;
    }

  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}
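/* Illustrative example, using a hypothetical declaration:

     int f (int) __attribute__ ((pure, nothrow));

   The decl branch above yields ECF_PURE | ECF_LIBCALL_BLOCK |
   ECF_NOTHROW for f.  If f's type were instead marked
   TYPE_RETURNS_STACK_DEPRESSED, the final clause would set
   ECF_SP_DEPRESSED and strip ECF_PURE, ECF_CONST and
   ECF_LIBCALL_BLOCK again.  */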
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (TREE_OPERAND (t, 0));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
					 VOIDmode, 0);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();

	    /* ANSI doesn't require a sequence point here,
	       but PCC has one, so this will avoid some problems.  */
	    emit_queue ();
	  }

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !LEGITIMATE_CONSTANT_P (args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */
	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameter registers.  */

	if ((! (GET_CODE (args[i].value) == REG
		|| (GET_CODE (args[i].value) == SUBREG
		    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
	    && args[i].mode != BLKmode
	    && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
	    && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
		|| preserve_subexpressions_p ()))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	enum machine_mode save_mode;
	int delta;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
	delta = -high;
#else
	delta = low;
#endif
	stack_area = gen_rtx_MEM (save_mode,
				  memory_address (save_mode,
						  plus_constant (argblock,
								 delta)));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}
static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
			    memory_address (save_mode,
					    plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	int endian_correction = 0;

	args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
	args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== downward)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, BITS_PER_WORD);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, word_mode,
			     word, BITS_PER_WORD);
	  }
      }
}
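/* Worked example of the endian correction above, with hypothetical
   values: a 3-byte struct (bytes == 3) on a 32-bit big-endian target
   (UNITS_PER_WORD == 4, BITS_PER_WORD == 32) gives

     endian_correction = 32 - 3 * 8 = 8,
     bitsize = MIN (24, 32) = 24,

   so the 24 significant bits are stored at bit offset 8, skipping the
   empty high-order byte of the word register.  */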
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   the actual parameter list given in ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree node for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
				 struct arg_data *args,
				 struct args_size *args_size,
				 int n_named_args ATTRIBUTE_UNUSED,
				 tree actparms, tree fndecl,
				 CUMULATIVE_ARGS *args_so_far,
				 int reg_parm_stack_space,
				 rtx *old_stack_level, int *old_pending_adj,
				 int *must_preallocate, int *ecf_flags,
				 bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;

      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (call_from_thunk_p
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
		 new object from the argument.  If we are passing by
		 invisible reference, the callee will do that for us, so we
		 can strip off the TARGET_EXPR.  This is not always safe,
		 but it is safe in the only case where this is a useful
		 optimization; namely, when the argument is a plain object.
		 In that case, the frontend is just asking the backend to
		 make a bitwise copy of the argument.  */

	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
	    {
	      /* In the V3 C++ ABI, parameters are destroyed in the caller.
		 We implement this by passing the address of the temporary
		 rather than expanding it into another allocated slot.  */
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
						STACK_CHECK_MAX_VAR_SIZE))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space
				      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0);
	      *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
	mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwinded before calling the routine, so
	 arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
						     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      args[i].partial
	= FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
				      argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     args[i].pass_on_stack ? 0 : args[i].partial,
			     fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	args[i].locate.where_pad =
	  BLOCK_REG_PADDING (mode, type,
			     int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
	ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
			     struct args_size *args_size,
			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  if (stack_pointer_delta & (preferred_stack_boundary - 1))
	    abort ();
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size->var
	    = size_binop (MINUS_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
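/* Worked example of the constant-size rounding above, with hypothetical
   values: args_size->constant == 20, stack_pointer_delta == 4 and a
   16-byte preferred_stack_boundary give

     ((20 + 4 + 15) / 16) * 16 - 4 = 32 - 4 = 28,

   so pushing 28 bytes of arguments leaves the stack pointer 16-byte
   aligned at the call.  */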
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.

     If this target defines ACCUMULATE_OUTGOING_ARGS to true, then we must
     precompute all arguments that contain function calls.  Otherwise,
     computing arguments for a subcall may clobber arguments for this call.

     If this target defines ACCUMULATE_OUTGOING_ARGS to false, then we only
     need to precompute arguments that change the stack pointer, such as calls
     to alloca, and calls that do not pop all of their arguments.  */

  for (i = 0; i < num_actuals; i++)
    if ((flags & ECF_LIBCALL_BLOCK)
	|| calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
	enum machine_mode mode;

	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].value, 0);

	mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
	if (mode != args[i].mode)
	  {
	    args[i].value
	      = convert_modes (args[i].mode, mode,
			       args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
	    /* CSE will replace this only if it contains args[i].value
	       pseudo, so convert it down to the declared mode using
	       a SUBREG.  */
	    if (GET_CODE (args[i].value) == REG
		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
	      {
		args[i].initial_value
		  = gen_lowpart_SUBREG (mode, args[i].value);
		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
		SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					      args[i].unsignedp);
	      }
#endif
	  }
      }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_align (args[i].stack, PARM_BOUNDARY);
	  set_mem_attributes (args[i].stack,
			      TREE_TYPE (args[i].tree_value), 1);

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
	  set_mem_attributes (args[i].stack_slot,
			      TREE_TYPE (args[i].tree_value), 1);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (! TREE_USED (fndecl))
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
      emit_queue ();
    }
  return funexp;
}
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
			  rtx *call_fusage, int flags, int is_sibcall,
			  int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
	{
	  int partial = args[i].partial;
	  int nregs;
	  int size = 0;
	  rtx before_arg = get_last_insn ();
	  /* Set to non-negative if must move a word at a time, even if just
	     one word (e.g, partial == 1 && mode == DFmode).  Set to -1 if
	     we just use a normal move insn.  This value can be zero if the
	     argument is a zero size structure with no fields.  */
	  nregs = -1;
	  if (partial)
	    nregs = partial;
	  else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
	    {
	      size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	      nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	    }
	  else
	    size = GET_MODE_SIZE (args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    {
	      tree type = TREE_TYPE (args[i].tree_value);
	      emit_group_load (reg, args[i].value, type,
			       int_size_in_bytes (type));
	    }

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    {
	      emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
	      /* Handle case where we have a value that needs shifting
		 up to the msb.  eg. a QImode value and we're padding
		 upward on a BYTES_BIG_ENDIAN machine.  */
	      if (size < UNITS_PER_WORD
		  && (args[i].locate.where_pad
		      == (BYTES_BIG_ENDIAN ? upward : downward)))
		{
		  rtx x;
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

		  /* Assigning REG here rather than a temp makes CALL_FUSAGE
		     report the whole reg as used.  Strictly speaking, the
		     call only uses SIZE bytes at the msb end, but it doesn't
		     seem worth generating rtl to say that.  */
		  reg = gen_rtx_REG (word_mode, REGNO (reg));
		  x = expand_binop (word_mode, ashl_optab, reg,
				    GEN_INT (shift), reg, 1, OPTAB_WIDEN);
		  if (x != reg)
		    emit_move_insn (reg, x);
		}
#endif
	    }

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    {
	      rtx mem = validize_mem (args[i].value);

#ifdef BLOCK_REG_PADDING
	      /* Handle a BLKmode that needs shifting.  */
	      if (nregs == 1 && size < UNITS_PER_WORD
		  && args[i].locate.where_pad == downward)
		{
		  rtx tem = operand_subword_force (mem, 0, args[i].mode);
		  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
		  rtx x = gen_reg_rtx (word_mode);
		  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
		  optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;

		  emit_move_insn (x, tem);
		  x = expand_binop (word_mode, dir, x, GEN_INT (shift),
				    ri, 1, OPTAB_WIDEN);
		  if (x != ri)
		    emit_move_insn (ri, x);
		}
	      else
#endif
		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
	    }

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
	    *sibcall_failure = 1;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg (call_fusage, reg);
	  else
	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
	}
    }
}
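/* Worked example of the BLOCK_REG_PADDING shifts above, with
   hypothetical values: a QImode argument (size == 1) on a 32-bit
   target gives shift == (4 - 1) * 8 == 24, moving the byte from the
   lsb of the word register to the msb end that the callee expects
   for a downward-padded argument.  */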
/* Try to integrate function.  See expand_inline_function for documentation
   about the parameters.  */

static rtx
try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
		  tree type, rtx structure_value_addr)
{
  rtx temp;
  rtx before_call;
  int i;
  rtx old_stack_level = 0;
  int reg_parm_stack_space = 0;

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif

  before_call = get_last_insn ();

  timevar_push (TV_INTEGRATION);

  temp = expand_inline_function (fndecl, actparms, target,
				 ignore, type,
				 structure_value_addr);

  timevar_pop (TV_INTEGRATION);

  /* If inlining succeeded, return.  */
  if (temp != (rtx) (size_t) - 1)
    {
      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* If the outgoing argument list must be preserved, push
	     the stack before executing the inlined function if it
	     makes any calls.  */

	  i = reg_parm_stack_space;
	  if (i > highest_outgoing_arg_in_use)
	    i = highest_outgoing_arg_in_use;
	  while (--i >= 0 && stack_usage_map[i] == 0)
	    ;

	  if (stack_arg_under_construction || i >= 0)
	    {
	      rtx first_insn
		= before_call ? NEXT_INSN (before_call) : get_insns ();
	      rtx insn = NULL_RTX, seq;

	      /* Look for a call in the inline function code.
		 If DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size is
		 nonzero then there is a call and it is not necessary
		 to scan the insns.  */

	      if (DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size == 0)
		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
		  if (GET_CODE (insn) == CALL_INSN)
		    break;

	      if (insn)
		{
		  /* Reserve enough stack space so that the largest
		     argument list of any function call in the inline
		     function does not overlap the argument list being
		     evaluated.  This is usually an overestimate because
		     allocate_dynamic_stack_space reserves space for an
		     outgoing argument list in addition to the requested
		     space, but there is no way to ask for stack space such
		     that an argument list of a certain length can be
		     safely constructed.

		     Add the stack space reserved for register arguments, if
		     any, in the inline function.  What is really needed is the
		     largest value of reg_parm_stack_space in the inline
		     function, but that is not available.  Using the current
		     value of reg_parm_stack_space is wrong, but gives
		     correct results on all supported machines.  */

		  int adjust =
		    (DECL_STRUCT_FUNCTION (fndecl)->outgoing_args_size
		     + reg_parm_stack_space);

		  start_sequence ();
		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
		  allocate_dynamic_stack_space (GEN_INT (adjust),
						NULL_RTX, BITS_PER_UNIT);
		  seq = get_insns ();
		  end_sequence ();
		  emit_insn_before (seq, first_insn);
		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
		}
	    }
	}

      /* If the result is equivalent to TARGET, return TARGET to simplify
	 checks in store_expr.  They can be equivalent but not equal in the
	 case of a function that returns BLKmode.  */
      if (temp != target && rtx_equal_p (temp, target))
	return target;
      return temp;
    }

  /* If inlining failed, mark FNDECL as needing to be compiled
     separately after all.  If function was declared inline,
     give a warning.  */
  if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
      && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
    {
      warning ("%Jinlining failed in call to '%F'", fndecl, fndecl);
      warning ("called from here");
    }
  (*lang_hooks.mark_addressable) (fndecl);
  return (rtx) (size_t) - 1;
}
/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */

static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
					   struct args_size *args_size,
					   int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
	adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
	adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
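
/* Worked example (illustrative only, not part of the original source):
   with PREFERRED_UNIT_STACK_BOUNDARY == 16, stack_pointer_delta == 4,
   UNADJUSTED_ARGS_SIZE == 20 and PENDING_STACK_ADJUST == 28:

     unadjusted_alignment = (4 + 20) % 16	= 8
     unadjusted_alignment = 8 - (28 % 16)	= -4
     adjustment		  = 28 + (-4)		= 24

   Popping 24 bytes leaves the stack under-aligned by exactly the 20
   argument bytes (delta 4 - 24 = -20), so pushing the arguments restores
   16-byte alignment, and ARGS_SIZE->CONSTANT becomes 28 - 24 + 20 = 24
   bytes to pop after the call.  */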
/* Scan expression X to see whether it dereferences any argument slots
   we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  Return nonzero if X dereferences such an
   argument slot, zero otherwise.  */

static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  unsigned int k;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    {
      if (XEXP (x, 0) == current_function_internal_arg_pointer)
	i = 0;
      else if (GET_CODE (XEXP (x, 0)) == PLUS
	       && XEXP (XEXP (x, 0), 0) ==
		  current_function_internal_arg_pointer
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	i = INTVAL (XEXP (XEXP (x, 0), 1));
      else
	return 0;

#ifdef ARGS_GROW_DOWNWARD
      i = -i - GET_MODE_SIZE (GET_MODE (x));
#endif

      for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
	if (i + k < stored_args_map->n_bits
	    && TEST_BIT (stored_args_map, i + k))
	  return 1;

      return 0;
    }

  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
	    return 1;
	}
      else if (*fmt == 'E')
	{
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
	      return 1;
	}
    }

  return 0;
}
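
/* Illustrative sketch (an assumed example, not from the original source):
   on a target where the argument area grows upward, a reference such as

     (mem:SI (plus (reg internal-arg-pointer) (const_int 8)))

   reaches the MEM case above with I == 8, so bits 8..11 of
   stored_args_map are tested; if any of those slots was already
   overwritten with an outgoing tail-call argument, an overlap is
   reported.  */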
/* Scan the sequence after INSN to see whether it dereferences any argument
   slots we already clobbered by tail call arguments (as noted in the
   stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is nonzero, add the
   stack slots for ARG to the stored_args_map bitmap afterwards (when ARG
   is a register MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the
   sequence after INSN dereferences such argument slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg,
				int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
	SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}
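
/* Usage sketch (illustrative): the argument-storing loops in expand_call
   below invoke this right after store_one_arg, as

     check_sibcall_argument_overlap (before_arg, &args[i], 1)

   so the slots occupied by ARGS[I] are recorded in stored_args_map and
   every later store is checked against the slots already written.  */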
static tree
fix_unsafe_tree (tree t)
{
  switch (unsafe_for_reeval (t))
    {
    case 0:			/* Safe.  */
      break;

    case 1:			/* Mildly unsafe.  */
      t = unsave_expr (t);
      break;

    case 2:			/* Wildly unsafe.  */
      {
	tree var = build_decl (VAR_DECL, NULL_TREE,
			       TREE_TYPE (t));
	SET_DECL_RTL (var,
		      expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
	t = var;
      }
      break;

    default:
      abort ();
    }
  return t;
}
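
/* For example (hedged illustration): an argument like `i++' is mildly
   unsafe -- re-evaluating it would increment I twice -- so it is wrapped
   in an UNSAVE_EXPR; an argument whose evaluation cannot be redone at
   all is wildly unsafe and is expanded once, right here, into a
   temporary VAR_DECL whose rtl is reused on later evaluations.  */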
/* If function value *VALUE was returned at the most significant end of a
   register, shift it towards the least significant end and convert it to
   TYPE's mode.  Return true and update *VALUE if some action was needed.

   TYPE is the type of the function's return value, which is known not
   to have mode BLKmode.  */

static bool
shift_returned_value (tree type, rtx *value)
{
  if (targetm.calls.return_in_msb (type))
    {
      HOST_WIDE_INT shift;

      shift = (GET_MODE_BITSIZE (GET_MODE (*value))
	       - BITS_PER_UNIT * int_size_in_bytes (type));
      if (shift > 0)
	{
	  *value = expand_binop (GET_MODE (*value), lshr_optab, *value,
				 GEN_INT (shift), 0, 1, OPTAB_WIDEN);
	  *value = convert_to_mode (TYPE_MODE (type), *value, 0);
	  return true;
	}
    }
  return false;
}
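
/* Worked example (illustrative assumption): on a return-in-MSB target,
   a QImode value returned in an SImode register occupies bits 31..24,
   so SHIFT = 32 - 8 * 1 = 24; a logical right shift by 24 moves the
   value to the least significant byte before the conversion to
   TYPE_MODE.  */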
/* Generate all the code for a function call
   and return an rtx for its value.
   Store the value in TARGET (specified as an rtx) if convenient.
   If the value is stored in TARGET then TARGET is returned.
   If IGNORE is nonzero, then we ignore the value of the function call.  */

rtx
expand_call (tree exp, rtx target, int ignore)
{
  /* Nonzero if we are currently expanding a call.  */
  static int currently_expanding_call = 0;

  /* List of actual parameters.  */
  tree actparms = TREE_OPERAND (exp, 1);
  /* RTX for the function to be called.  */
  rtx funexp;
  /* Sequence of insns to perform a tail recursive "call".  */
  rtx tail_recursion_insns = NULL_RTX;
  /* Sequence of insns to perform a normal "call".  */
  rtx normal_call_insns = NULL_RTX;
  /* Sequence of insns to perform a sibling "call".  */
  rtx tail_call_insns = NULL_RTX;
  /* Data type of the function.  */
  tree funtype;
  tree type_arg_types;
  /* Declaration of the function being called,
     or 0 if the function is computed (not known by name).  */
  tree fndecl = 0;
  /* The type of the function being called.  */
  tree fntype;
  int try_tail_call = 1;
  int try_tail_recursion = 1;
  int pass;

  /* Register in which non-BLKmode value will be returned,
     or 0 if no value or if value is BLKmode.  */
  rtx valreg;
  /* Address where we should return a BLKmode value;
     0 if value not BLKmode.  */
  rtx structure_value_addr = 0;
  /* Nonzero if that address is being passed by treating it as
     an extra, implicit first parameter.  Otherwise,
     it is passed by being copied directly into struct_value_rtx.  */
  int structure_value_addr_parm = 0;
  /* Size of aggregate value wanted, or zero if none wanted
     or if we are using the non-reentrant PCC calling convention
     or expecting the value in registers.  */
  HOST_WIDE_INT struct_value_size = 0;
  /* Nonzero if called function returns an aggregate in memory PCC style,
     by returning the address of where to find it.  */
  int pcc_struct_value = 0;
  rtx struct_value = 0;

  /* Number of actual parameters in this call, including struct value addr.  */
  int num_actuals;
  /* Number of named args.  Args after this are anonymous ones
     and they must all go on the stack.  */
  int n_named_args;

  /* Vector of information about each argument.
     Arguments are numbered in the order they will be pushed,
     not the order they are written.  */
  struct arg_data *args;

  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  struct args_size adjusted_args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  int unadjusted_args_size;
  /* Data on reg parms scanned so far.  */
  CUMULATIVE_ARGS args_so_far;
  /* Nonzero if a reg parm has been scanned.  */
  int reg_parm_seen;
  /* Nonzero if this is an indirect function call.  */

  /* Nonzero if we must avoid push-insns in the args for this call.
     If stack space is allocated for register parameters, but not by the
     caller, then it is preallocated in the fixed part of the stack frame.
     So the entire argument block must then be preallocated (i.e., we
     ignore PUSH_ROUNDING in that case).  */
  int must_preallocate = !PUSH_ARGS;

  /* Size of the stack reserved for parameter registers.  */
  int reg_parm_stack_space = 0;

  /* Address of space preallocated for stack parms
     (on machines that lack push insns), or 0 if space not preallocated.  */
  rtx argblock = 0;

  /* Mask of ECF_ flags.  */
  int flags = 0;
  /* Nonzero if this is a call to an inline function.  */
  int is_integrable = 0;
#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved */
#endif

  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  rtx temp_target = 0;
  char *initial_stack_usage_map = stack_usage_map;

  int old_stack_allocated;

  /* State variables to track stack modifications.  */
  rtx old_stack_level = 0;
  int old_stack_arg_under_construction = 0;
  int old_pending_adj = 0;
  int old_inhibit_defer_pop = inhibit_defer_pop;

  /* Some stack pointer alterations we make are performed via
     allocate_dynamic_stack_space.  This modifies the stack_pointer_delta,
     which we then also need to save/restore along the way.  */
  int old_stack_pointer_delta = 0;

  rtx call_fusage;
  rtx insn;
  tree p = TREE_OPERAND (exp, 0);
  tree addr = TREE_OPERAND (exp, 0);
  int i;
  /* The alignment of the stack, in bits.  */
  HOST_WIDE_INT preferred_stack_boundary;
  /* The alignment of the stack, in bytes.  */
  HOST_WIDE_INT preferred_unit_stack_boundary;
  /* See if this is a "nothrow" function call.  */
  if (TREE_NOTHROW (exp))
    flags |= ECF_NOTHROW;

  /* See if we can find a DECL-node for the actual function.
     As a result, decide whether this is a call to an integrable function.  */

  fndecl = get_callee_fndecl (exp);
  if (fndecl)
    {
      fntype = TREE_TYPE (fndecl);
      if (!flag_no_inline
	  && fndecl != current_function_decl
	  && DECL_INLINE (fndecl)
	  && DECL_STRUCT_FUNCTION (fndecl)
	  && DECL_STRUCT_FUNCTION (fndecl)->inlinable)
	is_integrable = 1;
      else if (! TREE_ADDRESSABLE (fndecl))
	{
	  /* In case this function later becomes inlinable,
	     record that there was already a non-inline call to it.

	     Use abstraction instead of setting TREE_ADDRESSABLE
	     directly.  */
	  if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
	      && optimize > 0)
	    {
	      warning ("%Jcan't inline call to '%F'", fndecl, fndecl);
	      warning ("called from here");
	    }
	  (*lang_hooks.mark_addressable) (fndecl);
	}

      if (ignore
	  && lookup_attribute ("warn_unused_result",
			       TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	warning ("ignoring return value of `%D', "
		 "declared with attribute warn_unused_result", fndecl);

      flags |= flags_from_decl_or_type (fndecl);
    }

  /* If we don't have a specific function to call, see if we have any
     attributes set in the type.  */
  else
    {
      fntype = TREE_TYPE (TREE_TYPE (p));
      if (ignore
	  && lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (fntype)))
	warning ("ignoring return value of function "
		 "declared with attribute warn_unused_result");
      flags |= flags_from_decl_or_type (fntype);
    }

  struct_value = targetm.calls.struct_value_rtx (fntype, 0);
  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    warning ("function call has aggregate value");

  /* If the result of a pure or const function call is ignored (or void),
     and none of its arguments are volatile, we can avoid expanding the
     call and just evaluate the arguments for side-effects.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && (ignore || target == const0_rtx
	  || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
    {
      bool volatilep = false;
      tree arg;

      for (arg = actparms; arg; arg = TREE_CHAIN (arg))
	if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
	  {
	    volatilep = true;
	    break;
	  }

      if (! volatilep)
	{
	  for (arg = actparms; arg; arg = TREE_CHAIN (arg))
	    expand_expr (TREE_VALUE (arg), const0_rtx,
			 VOIDmode, EXPAND_NORMAL);
	  return const0_rtx;
	}
    }
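
  /* For instance (illustrative): given
       extern int sq (int) __attribute__ ((const));
     the statement `sq (x++);' arrives here with ECF_CONST set and the
     result ignored; only `x++' is expanded, for its side effect, and no
     call insn is emitted.  */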
#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  if (reg_parm_stack_space > 0 && PUSH_ARGS)
    must_preallocate = 1;
#endif
  /* Set up a place to return a structure.  */

  /* Cater to broken compilers.  */
  if (aggregate_value_p (exp, fndecl))
    {
      /* This call returns a big structure.  */
      flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

#ifdef PCC_STATIC_STRUCT_RETURN
      {
	pcc_struct_value = 1;
	/* Easier than making that case work right.  */
	if (is_integrable)
	  {
	    /* In case this is a static function, note that it has been
	       used.  */
	    if (! TREE_ADDRESSABLE (fndecl))
	      (*lang_hooks.mark_addressable) (fndecl);
	    is_integrable = 0;
	  }
      }
#else /* not PCC_STATIC_STRUCT_RETURN */
      {
	struct_value_size = int_size_in_bytes (TREE_TYPE (exp));

	if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
	  {
	    /* The structure value address arg is already in actparms.
	       Pull it out.  It might be nice to just leave it there, but
	       we need to set structure_value_addr.  */
	    tree return_arg = TREE_VALUE (actparms);
	    actparms = TREE_CHAIN (actparms);
	    structure_value_addr = expand_expr (return_arg, NULL_RTX,
						VOIDmode, EXPAND_NORMAL);
	  }
	else if (target && GET_CODE (target) == MEM)
	  structure_value_addr = XEXP (target, 0);
	else
	  {
	    /* For variable-sized objects, we must be called with a target
	       specified.  If we were to allocate space on the stack here,
	       we would have no way of knowing when to free it.  */
	    rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);

	    mark_temp_addr_taken (d);
	    structure_value_addr = XEXP (d, 0);
	    target = 0;
	  }
      }
#endif /* not PCC_STATIC_STRUCT_RETURN */
    }
  /* If called function is inline, try to integrate it.  */

  if (is_integrable)
    {
      rtx temp = try_to_integrate (fndecl, actparms, target,
				   ignore, TREE_TYPE (exp),
				   structure_value_addr);
      if (temp != (rtx) (size_t) - 1)
	return temp;
    }

  /* Figure out the amount to which the stack should be aligned.  */
  preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  if (fndecl)
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
      if (i && i->preferred_incoming_stack_boundary)
	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
    }
  /* Operand 0 is a pointer-to-function; get the type of the function.  */
  funtype = TREE_TYPE (addr);
  if (! POINTER_TYPE_P (funtype))
    abort ();
  funtype = TREE_TYPE (funtype);

  /* Munge the tree to split complex arguments into their imaginary
     and real parts.  */
  if (SPLIT_COMPLEX_ARGS)
    {
      type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
      actparms = split_complex_values (actparms);
    }
  else
    type_arg_types = TYPE_ARG_TYPES (funtype);

  /* See if this is a call to a function that can return more than once
     or a call to longjmp or malloc.  */
  flags |= special_function_p (fndecl, flags);

  if (flags & ECF_MAY_BE_ALLOCA)
    current_function_calls_alloca = 1;
  /* If struct_value_rtx is 0, it means pass the address
     as if it were an extra parameter.  */
  if (structure_value_addr && struct_value == 0)
    {
      /* If structure_value_addr is a REG other than
	 virtual_outgoing_args_rtx, we can always use it.  If it
	 is not a REG, we must always copy it into a register.
	 If it is virtual_outgoing_args_rtx, we must copy it to another
	 register in some cases.  */
      rtx temp = (GET_CODE (structure_value_addr) != REG
		  || (ACCUMULATE_OUTGOING_ARGS
		      && stack_arg_under_construction
		      && structure_value_addr == virtual_outgoing_args_rtx)
		  ? copy_addr_to_reg (convert_memory_address
				      (Pmode, structure_value_addr))
		  : structure_value_addr);

      actparms
	= tree_cons (error_mark_node,
		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
				temp),
		     actparms);
      structure_value_addr_parm = 1;
    }
  /* Count the arguments and set NUM_ACTUALS.  */
  for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
    num_actuals++;

  /* Compute number of named args.
     Normally, don't include the last named arg if anonymous args follow.
     We do include the last named arg if
     targetm.calls.strict_argument_naming() returns nonzero.
     (If no anonymous args follow, the result of list_length is actually
     one too large.  This is harmless.)

     If targetm.calls.pretend_outgoing_varargs_named() returns
     nonzero, and targetm.calls.strict_argument_naming() returns zero,
     this machine will be able to place unnamed args that were passed
     in registers into the stack.  So treat all args as named.  This
     allows the insns emitted for a specific argument list to be
     independent of the function declaration.

     If targetm.calls.pretend_outgoing_varargs_named() returns zero,
     we do not have any reliable way to pass unnamed args in
     registers, so we must force them into memory.  */

  if ((targetm.calls.strict_argument_naming (&args_so_far)
       || ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
      && type_arg_types != 0)
    n_named_args
      = (list_length (type_arg_types)
	 /* Don't include the last named arg.  */
	 - (targetm.calls.strict_argument_naming (&args_so_far) ? 0 : 1)
	 /* Count the struct value address, if it is passed as a parm.  */
	 + structure_value_addr_parm);
  else
    /* If we know nothing, treat all args as named.  */
    n_named_args = num_actuals;
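
  /* Example (illustrative, for a hypothetical target where both
     pretend_outgoing_varargs_named and strict_argument_naming return
     zero): for `int f (int a, int b, ...)' called as f (1, 2, 3, 4),
     list_length (type_arg_types) is 2, so n_named_args = 2 - 1 = 1;
     only A is treated as named, and B and the two anonymous arguments
     are handled as unnamed.  */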
  /* Start updating where the next arg would go.

     On some machines (such as the PA) indirect calls have a different
     calling convention than normal calls.  The fourth argument in
     INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
     or not.  */
  INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);

  /* Make a vector to hold all the information about each arg.  */
  args = alloca (num_actuals * sizeof (struct arg_data));
  memset (args, 0, num_actuals * sizeof (struct arg_data));

  /* Build up entries in the ARGS array, compute the size of the
     arguments into ARGS_SIZE, etc.  */
  initialize_argument_information (num_actuals, args, &args_size,
				   n_named_args, actparms, fndecl,
				   &args_so_far, reg_parm_stack_space,
				   &old_stack_level, &old_pending_adj,
				   &must_preallocate, &flags,
				   CALL_FROM_THUNK_P (exp));
  if (args_size.var)
    {
      /* If this function requires a variable-sized argument list, don't
	 try to make a cse'able block for this call.  We may be able to
	 do this eventually, but it is too complicated to keep track of
	 what insns go in the cse'able block and which don't.  */

      flags &= ~ECF_LIBCALL_BLOCK;
      must_preallocate = 1;
    }

  /* Now make final decision about preallocating stack space.  */
  must_preallocate = finalize_must_preallocate (must_preallocate,
						num_actuals, args,
						&args_size);

  /* If the structure value address will reference the stack pointer, we
     must stabilize it.  We don't need to do this if we know that we are
     not going to adjust the stack pointer in processing this call.  */

  if (structure_value_addr
      && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
	  || reg_mentioned_p (virtual_outgoing_args_rtx,
			      structure_value_addr))
      && (args_size.var
	  || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
    structure_value_addr = copy_to_reg (structure_value_addr);
  /* Tail calls can make things harder to debug, and we've traditionally
     pushed these optimizations into -O2.  Don't try if we're already
     expanding a call, as that means we're an argument.  Don't try if
     there are cleanups, as we know there's code to follow the call.

     If rtx_equal_function_value_matters is false, that means we've
     finished with regular parsing.  Which means that some of the
     machinery we use to generate tail-calls is no longer in place.
     This is most often true of sjlj-exceptions, which we couldn't
     tail-call to anyway.

     If current_nesting_level () == 0, we're being called after
     the function body has been expanded.  This can happen when
     setting up trampolines in expand_function_end.  */
  if (currently_expanding_call++ != 0
      || !flag_optimize_sibling_calls
      || !rtx_equal_function_value_matters
      || current_nesting_level () == 0
      || any_pending_cleanups ()
      || args_size.var)
    try_tail_call = try_tail_recursion = 0;

  /* Tail recursion fails when we are not dealing with recursive calls.  */
  if (!try_tail_recursion
      || TREE_CODE (addr) != ADDR_EXPR
      || TREE_OPERAND (addr, 0) != current_function_decl)
    try_tail_recursion = 0;
  /* Check the remaining conditions that can defeat tail call optimization.  */
  if (
#ifdef HAVE_sibcall_epilogue
      !HAVE_sibcall_epilogue
#else
      1
#endif
      || !try_tail_call
      /* Doing sibling call optimization needs some work, since
	 structure_value_addr can be allocated on the stack.
	 It does not seem worth the effort since few optimizable
	 sibling calls will return a structure.  */
      || structure_value_addr != NULL_RTX
      /* Check whether the target is able to optimize the call
	 into a sibcall.  */
      || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
      /* Functions that do not return exactly once may not be sibcall
	 optimized.  */
      || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
      || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
      /* If the called function is nested in the current one, it might access
	 some of the caller's arguments, but could clobber them beforehand if
	 the argument areas are shared.  */
      || (fndecl && decl_function_context (fndecl) == current_function_decl)
      /* If this function requires more stack slots than the current
	 function, we cannot change it into a sibling call.  */
      || args_size.constant > current_function_args_size
      /* If the callee pops its own arguments, then it must pop exactly
	 the same number of arguments as the current function.  */
      || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
	  != RETURN_POPS_ARGS (current_function_decl,
			       TREE_TYPE (current_function_decl),
			       current_function_args_size))
      || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
    try_tail_call = 0;
  if (try_tail_call || try_tail_recursion)
    {
      int end, inc;
      actparms = NULL_TREE;
      /* Ok, we're going to give the tail call the old college try.
	 This means we're going to evaluate the function arguments
	 up to three times.  There are two degrees of badness we can
	 encounter, those that can be unsaved and those that can't.
	 (See unsafe_for_reeval commentary for details.)

	 Generate a new argument list.  Pass safe arguments through
	 unchanged.  For the easy badness wrap them in UNSAVE_EXPRs.
	 For hard badness, evaluate them now and put their resulting
	 rtx in a temporary VAR_DECL.

	 initialize_argument_information has ordered the array for the
	 order to be pushed, and we must remember this when reconstructing
	 the original argument order.  */

      if (PUSH_ARGS_REVERSED)
	{
	  inc = -1;
	  i = num_actuals - 1;
	  end = -1;
	}
      else
	{
	  inc = 1;
	  i = 0;
	  end = num_actuals;
	}

      for (; i != end; i += inc)
	{
	  args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
	  /* We need to build actparms for optimize_tail_recursion.  We can
	     safely trash away TREE_PURPOSE, since it is unused by this
	     function.  */
	  if (try_tail_recursion)
	    actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
	}
      /* Do the same for the function address if it is an expression.  */
      if (!fndecl)
	addr = fix_unsafe_tree (addr);
      /* Expanding one of those dangerous arguments could have added
	 cleanups, but otherwise give it a whirl.  */
      if (any_pending_cleanups ())
	try_tail_call = try_tail_recursion = 0;
    }
  /* Generate a tail recursion sequence when calling ourselves.  */

  if (try_tail_recursion)
    {
      /* We want to emit any pending stack adjustments before the tail
	 recursion "call".  That way we know any adjustment after the tail
	 recursion call can be ignored if we indeed use the tail recursion
	 call expansion.  */
      int save_pending_stack_adjust = pending_stack_adjust;
      int save_stack_pointer_delta = stack_pointer_delta;

      /* Emit any queued insns now; otherwise they would end up in
	 only one of the alternates.  */
      emit_queue ();

      /* Use a new sequence to hold any RTL we generate.  We do not even
	 know if we will use this RTL yet.  The final decision can not be
	 made until after RTL generation for the entire function is
	 complete.  */
      start_sequence ();
      /* If expanding any of the arguments creates cleanups, we can't
	 do a tailcall.  So, we'll need to pop the pending cleanups
	 list.  If, however, all goes well, and there are no cleanups
	 then the call to expand_start_target_temps will have no
	 effect.  */
      expand_start_target_temps ();
      if (optimize_tail_recursion (actparms, get_last_insn ()))
	{
	  if (any_pending_cleanups ())
	    try_tail_call = try_tail_recursion = 0;
	  else
	    tail_recursion_insns = get_insns ();
	}
      expand_end_target_temps ();
      end_sequence ();

      /* Restore the original pending stack adjustment for the sibling and
	 normal call cases below.  */
      pending_stack_adjust = save_pending_stack_adjust;
      stack_pointer_delta = save_stack_pointer_delta;
    }
  if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
    {
      /* A fork duplicates the profile information, and an exec discards
	 it.  We can't rely on fork/exec to be paired.  So write out the
	 profile information we have gathered so far, and clear it.  */
      /* ??? When Linux's __clone is called with CLONE_VM set, profiling
	 is subject to race conditions, just as with multithreaded
	 programs.  */

      emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
    }

  /* Ensure current function's preferred stack boundary is at least
     what we need.  We don't have to increase alignment for recursive
     functions.  */
  if (cfun->preferred_stack_boundary < preferred_stack_boundary
      && fndecl != current_function_decl)
    cfun->preferred_stack_boundary = preferred_stack_boundary;
  if (fndecl == current_function_decl)
    cfun->recursive_call_emit = true;

  preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;

  function_call_count++;
  /* We want to make two insn chains; one for a sibling call, the other
     for a normal call.  We will select one of the two chains after
     initial RTL generation is complete.  */
  for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
    {
      int sibcall_failure = 0;
      /* We want to emit any pending stack adjustments before the tail
	 recursion "call".  That way we know any adjustment after the tail
	 recursion call can be ignored if we indeed use the tail recursion
	 call expansion.  */
      int save_pending_stack_adjust = 0;
      int save_stack_pointer_delta = 0;
      rtx insns;
      rtx before_call, next_arg_reg;

      if (pass == 0)
	{
	  /* Emit any queued insns now; otherwise they would end up in
	     only one of the alternates.  */
	  emit_queue ();

	  /* State variables we need to save and restore between
	     iterations.  */
	  save_pending_stack_adjust = pending_stack_adjust;
	  save_stack_pointer_delta = stack_pointer_delta;
	}
      if (pass)
	flags &= ~ECF_SIBCALL;
      else
	flags |= ECF_SIBCALL;

      /* Other state variables that we must reinitialize each time
	 through the loop (that are not initialized by the loop itself).  */
      argblock = 0;
      call_fusage = 0;

      /* Start a new sequence for the normal call case.

	 From this point on, if the sibling call fails, we want to set
	 sibcall_failure instead of continuing the loop.  */
      start_sequence ();

      if (pass == 0)
	{
	  /* We know at this point that there are not currently any
	     pending cleanups.  If, however, in the process of evaluating
	     the arguments we were to create some, we'll need to be
	     able to get rid of them.  */
	  expand_start_target_temps ();
	}
      /* Don't let pending stack adjusts add up to too much.
	 Also, do all pending adjustments now if there is any chance
	 this might be a call to alloca or if we are expanding a sibling
	 call sequence or if we are calling a function that is to return
	 with stack pointer depressed.  */
      if (pending_stack_adjust >= 32
	  || (pending_stack_adjust > 0
	      && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
	  || pass == 0)
	do_pending_stack_adjust ();

      /* When calling a const function, we must pop the stack args right away,
	 so that the pop is deleted or moved with the call.  */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
	NO_DEFER_POP;

      /* Precompute any arguments as needed.  */
      if (pass)
	precompute_arguments (flags, num_actuals, args);

      /* Now we are about to start emitting insns that can be deleted
	 if a libcall is deleted.  */
      if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
	start_sequence ();
      adjusted_args_size = args_size;
      /* Compute the actual size of the argument block required.  The variable
	 and constant sizes must be combined, the size may have to be rounded,
	 and there may be a minimum required size.  When generating a sibcall
	 pattern, do not round up, since we'll be re-using whatever space our
	 caller provided.  */
      unadjusted_args_size
	= compute_argument_block_size (reg_parm_stack_space,
				       &adjusted_args_size,
				       (pass == 0 ? 0
					: preferred_stack_boundary));

      old_stack_allocated = stack_pointer_delta - pending_stack_adjust;

      /* The argument block when performing a sibling call is the
	 incoming argument block.  */
      if (pass == 0)
	{
	  argblock = virtual_incoming_args_rtx;
	  argblock
#ifdef STACK_GROWS_DOWNWARD
	    = plus_constant (argblock, current_function_pretend_args_size);
#else
	    = plus_constant (argblock, -current_function_pretend_args_size);
#endif
	  stored_args_map = sbitmap_alloc (args_size.constant);
	  sbitmap_zero (stored_args_map);
	}
      /* If we have no actual push instructions, or shouldn't use them,
	 make space for all args right now.  */
      else if (adjusted_args_size.var != 0)
	{
	  if (old_stack_level == 0)
	    {
	      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
	      old_stack_pointer_delta = stack_pointer_delta;
	      old_pending_adj = pending_stack_adjust;
	      pending_stack_adjust = 0;
	      /* stack_arg_under_construction says whether a stack arg is
		 being constructed at the old stack level.  Pushing the stack
		 gets a clean outgoing argument block.  */
	      old_stack_arg_under_construction = stack_arg_under_construction;
	      stack_arg_under_construction = 0;
	    }
	  argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
	}
      else
	{
	  /* Note that we must go through the motions of allocating an argument
	     block even if the size is zero because we may be storing args
	     in the area reserved for register arguments, which may be part of
	     the stack frame.  */

	  int needed = adjusted_args_size.constant;

	  /* Store the maximum argument space used.  It will be pushed by
	     the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
	     checking).  */

	  if (needed > current_function_outgoing_args_size)
	    current_function_outgoing_args_size = needed;

	  if (must_preallocate)
	    {
	      if (ACCUMULATE_OUTGOING_ARGS)
		{
		  /* Since the stack pointer will never be pushed, it is
		     possible for the evaluation of a parm to clobber
		     something we have already written to the stack.
		     Since most function calls on RISC machines do not use
		     the stack, this is uncommon, but must work correctly.

		     Therefore, we save any area of the stack that was already
		     written and that we are using.  Here we set up to do this
		     by making a new stack usage map from the old one.  The
		     actual save will be done by store_one_arg.

		     Another approach might be to try to reorder the argument
		     evaluations to avoid this conflicting stack usage.  */

#ifndef OUTGOING_REG_PARM_STACK_SPACE
		  /* Since we will be writing into the entire argument area,
		     the map must be allocated for its entire size, not just
		     the part that is the responsibility of the caller.  */
		  needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
		  highest_outgoing_arg_in_use
		    = MAX (initial_highest_arg_in_use, needed + 1);
#else
		  highest_outgoing_arg_in_use
		    = MAX (initial_highest_arg_in_use, needed);
#endif
		  stack_usage_map = alloca (highest_outgoing_arg_in_use);

		  if (initial_highest_arg_in_use)
		    memcpy (stack_usage_map, initial_stack_usage_map,
			    initial_highest_arg_in_use);

		  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
		    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
			    (highest_outgoing_arg_in_use
			     - initial_highest_arg_in_use));
		  needed = 0;

		  /* The address of the outgoing argument list must not be
		     copied to a register here, because argblock would be left
		     pointing to the wrong place after the call to
		     allocate_dynamic_stack_space below.  */

		  argblock = virtual_outgoing_args_rtx;
		}
	      else
		{
		  if (inhibit_defer_pop == 0)
		    {
		      /* Try to reuse some or all of the pending_stack_adjust
			 to get this space.  */
		      needed
			= (combine_pending_stack_adjustment_and_call
			   (unadjusted_args_size,
			    &adjusted_args_size,
			    preferred_unit_stack_boundary));

		      /* combine_pending_stack_adjustment_and_call computes
			 an adjustment before the arguments are allocated.
			 Account for them and see whether or not the stack
			 needs to go up or down.  */
		      needed = unadjusted_args_size - needed;

		      if (needed < 0)
			{
			  /* We're releasing stack space.  */
			  /* ??? We can avoid any adjustment at all if we're
			     already aligned.  FIXME.  */
			  pending_stack_adjust = -needed;
			  do_pending_stack_adjust ();
			  needed = 0;
			}
		      else
			/* We need to allocate space.  We'll do that in
			   push_block below.  */
			pending_stack_adjust = 0;
		    }

		  /* Special case this because overhead of `push_block' in
		     this case is non-trivial.  */
		  if (needed == 0)
		    argblock = virtual_outgoing_args_rtx;
		  else
		    {
		      argblock = push_block (GEN_INT (needed), 0, 0);
#ifdef ARGS_GROW_DOWNWARD
		      argblock = plus_constant (argblock, needed);
#endif
		    }

		  /* We only really need to call `copy_to_reg' in the case
		     where push insns are going to be used to pass ARGBLOCK
		     to a function call in ARGS.  In that case, the stack
		     pointer changes value from the allocation point to the
		     call point, and hence the value of
		     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
		     as well always do it.  */
		  argblock = copy_to_reg (argblock);
		}
	    }
	}
      if (ACCUMULATE_OUTGOING_ARGS)
	{
	  /* The save/restore code in store_one_arg handles all
	     cases except one: a constructor call (including a C
	     function returning a BLKmode struct) to initialize
	     an argument.  */
	  if (stack_arg_under_construction)
	    {
#ifndef OUTGOING_REG_PARM_STACK_SPACE
	      rtx push_size = GEN_INT (reg_parm_stack_space
				       + adjusted_args_size.constant);
#else
	      rtx push_size = GEN_INT (adjusted_args_size.constant);
#endif
	      if (old_stack_level == 0)
		{
		  emit_stack_save (SAVE_BLOCK, &old_stack_level,
				   NULL_RTX);
		  old_stack_pointer_delta = stack_pointer_delta;
		  old_pending_adj = pending_stack_adjust;
		  pending_stack_adjust = 0;
		  /* stack_arg_under_construction says whether a stack
		     arg is being constructed at the old stack level.
		     Pushing the stack gets a clean outgoing argument
		     block.  */
		  old_stack_arg_under_construction
		    = stack_arg_under_construction;
		  stack_arg_under_construction = 0;
		  /* Make a new map for the new argument list.  */
		  stack_usage_map = alloca (highest_outgoing_arg_in_use);
		  memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
		  highest_outgoing_arg_in_use = 0;
		}
	      allocate_dynamic_stack_space (push_size, NULL_RTX,
					    BITS_PER_UNIT);
	    }

	  /* If argument evaluation might modify the stack pointer,
	     copy the address of the argument list to a register.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].pass_on_stack)
	      {
		argblock = copy_addr_to_reg (argblock);
		break;
	      }
	}
      compute_argument_addresses (args, argblock, num_actuals);

      /* If we push args individually in reverse order, perform stack alignment
	 before the first push (the last arg).  */
      if (PUSH_ARGS_REVERSED && argblock == 0
	  && adjusted_args_size.constant != unadjusted_args_size)
	{
	  /* When the stack adjustment is pending, we get better code
	     by combining the adjustments.  */
	  if (pending_stack_adjust
	      && ! (flags & ECF_LIBCALL_BLOCK)
	      && ! inhibit_defer_pop)
	    {
	      pending_stack_adjust
		= (combine_pending_stack_adjustment_and_call
		   (unadjusted_args_size,
		    &adjusted_args_size,
		    preferred_unit_stack_boundary));
	      do_pending_stack_adjust ();
	    }
	  else if (argblock == 0)
	    anti_adjust_stack (GEN_INT (adjusted_args_size.constant
					- unadjusted_args_size));
	}
      /* Now that the stack is properly aligned, pops can't safely
	 be deferred during the evaluation of the arguments.  */
      NO_DEFER_POP;

      funexp = rtx_for_function_call (fndecl, addr);
      /* Figure out the register where the value, if any, will come back.  */
      valreg = 0;
      if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
	  && ! structure_value_addr)
	{
	  if (pcc_struct_value)
	    valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
					  fndecl, (pass == 0));
	  else
	    valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
	}

      /* Precompute all register parameters.  It isn't safe to compute anything
	 once we have started filling any specific hard regs.  */
      precompute_register_parameters (num_actuals, args, &reg_parm_seen);

#ifdef REG_PARM_STACK_SPACE
      /* Save the fixed argument area if it's part of the caller's frame and
	 is clobbered by argument setup for this call.  */
      if (ACCUMULATE_OUTGOING_ARGS && pass)
	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					      &low_to_save, &high_to_save);
#endif
      /* Now store (and compute if necessary) all non-register parms.
	 These come before register parms, since they can require block-moves,
	 which could clobber the registers used for register parms.
	 Parms which have partial registers are not stored here,
	 but we do preallocate space here if they want that.  */

      for (i = 0; i < num_actuals; i++)
	if (args[i].reg == 0 || args[i].pass_on_stack)
	  {
	    rtx before_arg = get_last_insn ();

	    if (store_one_arg (&args[i], argblock, flags,
			       adjusted_args_size.var != 0,
			       reg_parm_stack_space)
		|| (pass == 0
		    && check_sibcall_argument_overlap (before_arg,
						       &args[i], 1)))
	      sibcall_failure = 1;

	    if (flags & ECF_CONST
		&& args[i].stack
		&& args[i].value == args[i].stack)
	      call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					       gen_rtx_USE (VOIDmode,
							    args[i].stack),
					       call_fusage);
	  }

      /* If we have a parm that is passed in registers but not in memory
	 and whose alignment does not permit a direct copy into registers,
	 make a group of pseudos that correspond to each register that we
	 will later fill.  */
      if (STRICT_ALIGNMENT)
	store_unaligned_arguments_into_pseudos (args, num_actuals);
      /* Now store any partially-in-registers parm.
	 This is the last place a block-move can happen.  */
      if (reg_parm_seen)
	for (i = 0; i < num_actuals; i++)
	  if (args[i].partial != 0 && ! args[i].pass_on_stack)
	    {
	      rtx before_arg = get_last_insn ();

	      if (store_one_arg (&args[i], argblock, flags,
				 adjusted_args_size.var != 0,
				 reg_parm_stack_space)
		  || (pass == 0
		      && check_sibcall_argument_overlap (before_arg,
							 &args[i], 1)))
		sibcall_failure = 1;
	    }

      /* If we pushed args in forward order, perform stack alignment
	 after pushing the last arg.  */
      if (!PUSH_ARGS_REVERSED && argblock == 0)
	anti_adjust_stack (GEN_INT (adjusted_args_size.constant
				    - unadjusted_args_size));

      /* If register arguments require space on the stack and stack space
	 was not preallocated, allocate stack space here for arguments
	 passed in registers.  */
#ifdef OUTGOING_REG_PARM_STACK_SPACE
      if (!ACCUMULATE_OUTGOING_ARGS
	  && must_preallocate == 0 && reg_parm_stack_space > 0)
	anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif
      /* Pass the function the address in which to return a
	 structure value.  */
      if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
	{
	  structure_value_addr
	    = convert_memory_address (Pmode, structure_value_addr);
	  emit_move_insn (struct_value,
			  force_reg (Pmode,
				     force_operand (structure_value_addr,
						    NULL_RTX)));

	  if (GET_CODE (struct_value) == REG)
	    use_reg (&call_fusage, struct_value);
	}

      funexp = prepare_call_address (funexp, fndecl, &call_fusage,
				     reg_parm_seen, pass == 0);

      load_register_parameters (args, num_actuals, &call_fusage, flags,
				pass == 0, &sibcall_failure);
      /* Perform postincrements before actually calling the function.  */
      emit_queue ();

      /* Save a pointer to the last insn before the call, so that we can
	 later safely search backwards to find the CALL_INSN.  */
      before_call = get_last_insn ();

      /* Set up next argument register.  For sibling calls on machines
	 with register windows this should be the incoming register.  */
#ifdef FUNCTION_INCOMING_ARG
      if (pass == 0)
	next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
					      void_type_node, 1);
      else
#endif
	next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
				     void_type_node, 1);

      /* All arguments and registers used for the call must be set up by
	 now!  */

      /* Stack must be properly aligned now.  */
      if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
	abort ();

      /* Generate the actual call instruction.  */
      emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
		   adjusted_args_size.constant, struct_value_size,
		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
		   flags, & args_so_far);
      /* If call is cse'able, make appropriate pair of reg-notes around it.
	 Test valreg so we don't crash; may safely ignore `const'
	 if return type is void.  Disable for PARALLEL return values, because
	 we have no way to move such values into a pseudo register.  */
      if (pass && (flags & ECF_LIBCALL_BLOCK))
	{
	  rtx insns;
	  rtx insn;
	  bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;

	  insns = get_insns ();

	  /* Expansion of block moves possibly introduced a loop that may
	     not appear inside libcall block.  */
	  for (insn = insns; insn; insn = NEXT_INSN (insn))
	    if (GET_CODE (insn) == JUMP_INSN)
	      failed = true;

	  if (failed)
	    {
	      end_sequence ();
	      emit_insn (insns);
	    }
	  else
	    {
	      rtx note = 0;
	      rtx temp = gen_reg_rtx (GET_MODE (valreg));

	      /* Mark the return value as a pointer if needed.  */
	      if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
		mark_reg_pointer (temp,
				  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));

	      end_sequence ();
	      if (flag_unsafe_math_optimizations
		  && fndecl
		  && DECL_BUILT_IN (fndecl)
		  && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
		      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
		      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
		note = gen_rtx_fmt_e (SQRT,
				      GET_MODE (temp),
				      args[0].initial_value);
	      else
		{
		  /* Construct an "equal form" for the value which
		     mentions all the arguments in order as well as
		     the function name.  */
		  for (i = 0; i < num_actuals; i++)
		    note = gen_rtx_EXPR_LIST (VOIDmode,
					      args[i].initial_value, note);
		  note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);

		  if (flags & ECF_PURE)
		    note = gen_rtx_EXPR_LIST (VOIDmode,
			      gen_rtx_USE (VOIDmode,
					   gen_rtx_MEM (BLKmode,
							gen_rtx_SCRATCH (VOIDmode))),
			      note);
		}
	      emit_libcall_block (insns, temp, valreg, note);

	      valreg = temp;
	    }
	}
      else if (pass && (flags & ECF_MALLOC))
	{
	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
	  rtx last, insns;

	  /* The return value from a malloc-like function is a pointer.  */
	  if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
	    mark_reg_pointer (temp, BIGGEST_ALIGNMENT);

	  emit_move_insn (temp, valreg);

	  /* The return value from a malloc-like function can not alias
	     anything else.  */
	  last = get_last_insn ();
	  REG_NOTES (last) =
	    gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));

	  /* Write out the sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  valreg = temp;
	}
      /* For calls to `setjmp', etc., inform flow.c it should complain
	 if nonvolatile values are live.  For functions that cannot return,
	 inform flow that control does not fall through.  */

      if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
	{
	  /* The barrier must be emitted
	     immediately after the CALL_INSN.  Some ports emit more
	     than just a CALL_INSN above, so we must search for it here.  */

	  rtx last = get_last_insn ();
	  while (GET_CODE (last) != CALL_INSN)
	    {
	      last = PREV_INSN (last);
	      /* There was no CALL_INSN?  */
	      if (last == before_call)
		abort ();
	    }

	  emit_barrier_after (last);

	  /* Stack adjustments after a noreturn call are dead code.
	     However when NO_DEFER_POP is in effect, we must preserve
	     stack_pointer_delta.  */
	  if (inhibit_defer_pop == 0)
	    {
	      stack_pointer_delta = old_stack_allocated;
	      pending_stack_adjust = 0;
	    }
	}

      if (flags & ECF_LONGJMP)
	current_function_calls_longjmp = 1;
      /* If value type not void, return an rtx for the value.  */

      /* If there are cleanups to be called, don't use a hard reg as target.
	 We need to double check this and see if it matters anymore.  */
      if (any_pending_cleanups ())
	{
	  if (target && REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER)
	    target = 0;
	  sibcall_failure = 1;
	}

      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
	  || ignore)
	target = const0_rtx;
      else if (structure_value_addr)
	{
	  if (target == 0 || GET_CODE (target) != MEM)
	    {
	      target
		= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
			       memory_address (TYPE_MODE (TREE_TYPE (exp)),
					       structure_value_addr));
	      set_mem_attributes (target, exp, 1);
	    }
	}
      else if (pcc_struct_value)
	{
	  /* This is the special C++ case where we need to
	     know what the true target was.  We take care to
	     never use this value more than once in one expression.  */
	  target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
				copy_to_reg (valreg));
	  set_mem_attributes (target, exp, 1);
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  /* Second condition is added because "target" is freed at the
	     end of "pass0" for -O2 when call is made to
	     expand_end_target_temps ().  Its "in_use" flag has been set
	     to false, so allocate a new temp.  */
	  if (target == 0 || (pass == 1 && target == temp_target))
	    {
	      /* This will only be assigned once, so it can be readonly.  */
	      tree nt = build_qualified_type (TREE_TYPE (exp),
					      (TYPE_QUALS (TREE_TYPE (exp))
					       | TYPE_QUAL_CONST));

	      target = assign_temp (nt, 0, 1, 1);
	      temp_target = target;
	      preserve_temp_slots (target);
	    }

	  if (! rtx_equal_p (target, valreg))
	    emit_group_store (target, valreg, TREE_TYPE (exp),
			      int_size_in_bytes (TREE_TYPE (exp)));

	  /* We can not support sibling calls for this case.  */
	  sibcall_failure = 1;
	}
      else if (target
	       && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
	       && GET_MODE (target) == GET_MODE (valreg))
	{
	  /* TARGET and VALREG cannot be equal at this point because the
	     latter would not have REG_FUNCTION_VALUE_P true, while the
	     former would if it were referring to the same register.

	     If they refer to the same register, this move will be a no-op,
	     except when function inlining is being done.  */
	  emit_move_insn (target, valreg);

	  /* If we are setting a MEM, this code must be executed.  Since it is
	     emitted after the call insn, sibcall optimization cannot be
	     performed in that case.  */
	  if (GET_CODE (target) == MEM)
	    sibcall_failure = 1;
	}
      else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
	{
	  target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));

	  /* We can not support sibling calls for this case.  */
	  sibcall_failure = 1;
	}
      else
	{
	  if (shift_returned_value (TREE_TYPE (exp), &valreg))
	    sibcall_failure = 1;

	  target = copy_to_reg (valreg);
	}
      if (targetm.calls.promote_function_return (funtype))
	{
	  /* If we promoted this return value, make the proper SUBREG.  TARGET
	     might be const0_rtx here, so be careful.  */
	  if (GET_CODE (target) == REG
	      && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
	      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
	    {
	      tree type = TREE_TYPE (exp);
	      int unsignedp = TREE_UNSIGNED (type);
	      int offset = 0;

	      /* If we don't promote as expected, something is wrong.  */
	      if (GET_MODE (target)
		  != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
		abort ();

	      if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
		  && (GET_MODE_SIZE (GET_MODE (target))
		      > GET_MODE_SIZE (TYPE_MODE (type))))
		{
		  offset = (GET_MODE_SIZE (GET_MODE (target))
			    - GET_MODE_SIZE (TYPE_MODE (type)));
		  if (! BYTES_BIG_ENDIAN)
		    offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
		  else if (! WORDS_BIG_ENDIAN)
		    offset %= UNITS_PER_WORD;
		}
	      target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
	      SUBREG_PROMOTED_VAR_P (target) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
	    }
	}
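
      /* Worked example (illustrative): an HImode value promoted to an
	 SImode register on a fully big-endian target gives
	 OFFSET = 4 - 2 = 2 (both endianness tests leave it unchanged),
	 so the SUBREG selects the low-order half of the register; on a
	 little-endian-byte target OFFSET is rounded down to 0.  */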
      /* If size of args is variable or this was a constructor call for a stack
	 argument, restore saved stack-pointer value.  */

      if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
	{
	  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
	  stack_pointer_delta = old_stack_pointer_delta;
	  pending_stack_adjust = old_pending_adj;
	  stack_arg_under_construction = old_stack_arg_under_construction;
	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	  sibcall_failure = 1;
	}
      else if (ACCUMULATE_OUTGOING_ARGS && pass)
	{
#ifdef REG_PARM_STACK_SPACE
	  if (save_area)
	    restore_fixed_argument_area (save_area, argblock,
					 high_to_save, low_to_save);
#endif

	  /* If we saved any argument areas, restore them.  */
	  for (i = 0; i < num_actuals; i++)
	    if (args[i].save_area)
	      {
		enum machine_mode save_mode = GET_MODE (args[i].save_area);
		rtx stack_area
		  = gen_rtx_MEM (save_mode,
				 memory_address (save_mode,
						 XEXP (args[i].stack_slot, 0)));

		if (save_mode != BLKmode)
		  emit_move_insn (stack_area, args[i].save_area);
		else
		  emit_block_move (stack_area, args[i].save_area,
				   GEN_INT (args[i].locate.size.constant),
				   BLOCK_OP_CALL_PARM);
	      }

	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
	  stack_usage_map = initial_stack_usage_map;
	}
      /* If this was alloca, record the new stack level for nonlocal gotos.
	 Check for the handler slots since we might not have a save area
	 for non-local gotos.  */

      if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

      /* Free up storage we no longer need.  */
      for (i = 0; i < num_actuals; ++i)
	if (args[i].aligned_regs)
	  free (args[i].aligned_regs);

      if (pass == 0)
	{
	  /* Undo the fake expand_start_target_temps we did earlier.  If
	     there had been any cleanups created, we've already set
	     sibcall_failure.  */
	  expand_end_target_temps ();
	}
      /* If this function is returning into a memory location marked as
	 readonly, it means it is initializing that location.  We normally treat
	 functions as not clobbering such locations, so we need to specify that
	 this one does.  We do this by adding the appropriate CLOBBER to the
	 CALL_INSN function usage list.  This cannot be done by emitting a
	 standalone CLOBBER after the call because the latter would be ignored
	 by at least the delay slot scheduling pass.  We do this now instead of
	 adding to call_fusage before the call to emit_call_1 because TARGET
	 may be modified in the meantime.  */
      if (structure_value_addr != 0 && target != 0
	  && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
	add_function_usage_to
	  (last_call_insn (),
	   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
			      NULL_RTX));

      insns = get_insns ();
      end_sequence ();

      if (pass == 0)
	{
	  tail_call_insns = insns;
	  /* Restore the pending stack adjustment now that we have
	     finished generating the sibling call sequence.  */

	  pending_stack_adjust = save_pending_stack_adjust;
	  stack_pointer_delta = save_stack_pointer_delta;

	  /* Prepare arg structure for next iteration.  */
	  for (i = 0; i < num_actuals; i++)
	    {
	      args[i].value = 0;
	      args[i].aligned_regs = 0;
	      args[i].stack = 0;
	    }

	  sbitmap_free (stored_args_map);
	}
      else
	{
	  normal_call_insns = insns;

	  /* Verify that we've deallocated all the stack we used.  */
	  if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
	      && old_stack_allocated != stack_pointer_delta
					- pending_stack_adjust)
	    abort ();
	}

      /* If something prevents making this a sibling call,
	 zero out the sequence.  */
      if (sibcall_failure)
	tail_call_insns = NULL_RTX;
    }
  /* The function optimize_sibling_and_tail_recursive_calls doesn't
     handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs.  This
     can happen if the arguments to this function call an inline
     function whose expansion contains another CALL_PLACEHOLDER.

     If there are any C_Ps in any of these sequences, replace them
     with their normal call.  */

  for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CALL_INSN
	&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
      replace_call_placeholder (insn, sibcall_use_normal);

  for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CALL_INSN
	&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
      replace_call_placeholder (insn, sibcall_use_normal);

  for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CALL_INSN
	&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
      replace_call_placeholder (insn, sibcall_use_normal);
  /* If this was a potential tail recursion site, then emit a
     CALL_PLACEHOLDER with the normal and the tail recursion streams.
     One of them will be selected later.  */
  if (tail_recursion_insns || tail_call_insns)
    {
      /* The tail recursion label must be kept around.  We could expose
	 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
	 and makes determining true tail recursion sites difficult.

	 So we set LABEL_PRESERVE_P here, then clear it when we select
	 one of the call sequences after rtl generation is complete.  */
      if (tail_recursion_insns)
	LABEL_PRESERVE_P (tail_recursion_label) = 1;
      emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
						tail_call_insns,
						tail_recursion_insns,
						tail_recursion_label));
    }
  else
    emit_insn (normal_call_insns);

  currently_expanding_call--;
  /* If this function returns with the stack pointer depressed, ensure
     this block saves and restores the stack pointer, show it was
     changed, and adjust for any outgoing arg space.  */
  if (flags & ECF_SP_DEPRESSED)
    {
      clear_pending_stack_adjust ();
      emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
      emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
      save_stack_pointer ();
    }

  return target;
}
/* Traverse an argument list in VALUES and expand all complex
   arguments into their components.  */

static tree
split_complex_values (tree values)
{
  tree p;

  values = copy_list (values);

  for (p = values; p; p = TREE_CHAIN (p))
    {
      tree complex_value = TREE_VALUE (p);
      tree complex_type;

      complex_type = TREE_TYPE (complex_value);
      if (!complex_type)
	continue;

      if (TREE_CODE (complex_type) == COMPLEX_TYPE)
	{
	  tree subtype;
	  tree real, imag, next;

	  subtype = TREE_TYPE (complex_type);
	  complex_value = save_expr (complex_value);
	  real = build1 (REALPART_EXPR, subtype, complex_value);
	  imag = build1 (IMAGPART_EXPR, subtype, complex_value);

	  TREE_VALUE (p) = real;
	  next = TREE_CHAIN (p);
	  imag = build_tree_list (NULL_TREE, imag);
	  TREE_CHAIN (p) = imag;
	  TREE_CHAIN (imag) = next;

	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
	}
    }

  return values;
}
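
/* Example (illustrative): an argument list (a, z, b) where Z has
   COMPLEX_TYPE becomes (a, REALPART_EXPR <z'>, IMAGPART_EXPR <z'>, b),
   where z' is a SAVE_EXPR of Z so the operand is evaluated only once.  */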
/* Traverse a list of TYPES and expand all complex types into their
   components.  */

static tree
split_complex_types (tree types)
{
  tree p;

  types = copy_list (types);

  for (p = types; p; p = TREE_CHAIN (p))
    {
      tree complex_type = TREE_VALUE (p);

      if (TREE_CODE (complex_type) == COMPLEX_TYPE)
	{
	  tree next, imag;

	  /* Rewrite complex type with component type.  */
	  TREE_VALUE (p) = TREE_TYPE (complex_type);
	  next = TREE_CHAIN (p);

	  /* Add another component type for the imaginary part.  */
	  imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
	  TREE_CHAIN (p) = imag;
	  TREE_CHAIN (imag) = next;

	  /* Skip the newly created node.  */
	  p = TREE_CHAIN (p);
	}
    }

  return types;
}
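
/* Example (illustrative): the type list (int, complex double, char)
   becomes (int, double, double, char); each COMPLEX_TYPE node is
   rewritten to its component type and a second node of that type is
   spliced in for the imaginary part.  */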
3626 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3627 The RETVAL parameter specifies whether return value needs to be saved, other
3628 parameters are documented in the emit_library_call function below. */
3631 emit_library_call_value_1 (int retval
, rtx orgfun
, rtx value
,
3632 enum libcall_type fn_type
,
3633 enum machine_mode outmode
, int nargs
, va_list p
)
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg
  {
    rtx value;
    enum machine_mode mode;
    rtx reg;
    int partial;
    struct locate_and_pad_arg_data locate;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx before_call;
  tree tfom;			/* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;		/* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;

  rtx struct_value = targetm.calls.struct_value_rtx (0, 0);

#ifdef REG_PARM_STACK_SPACE
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif

  /* By default, library functions can not throw.  */
  flags = ECF_NOTHROW;
  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_CONST_MAKE_BLOCK:
      flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
      break;
    case LCT_PURE_MAKE_BLOCK:
      flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_ALWAYS_RETURN:
      flags = ECF_ALWAYS_RETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;
  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
      if (aggregate_value_p (tfom, 0))
	{
#ifdef PCC_STATIC_STRUCT_RETURN
	  rtx pointer_reg
	    = hard_function_value (build_pointer_type (tfom), 0, 0);
	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
	  pcc_struct_value = 1;
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
	  struct_value_size = GET_MODE_SIZE (outmode);
	  if (value != 0 && GET_CODE (value) == MEM)
	    mem_value = value;
	  else
	    mem_value = assign_temp (tfom, 0, 1, 1);
#endif
	  /* This call returns a big structure.  */
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
	}
    }
  else
    tfom = void_type_node;
  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small
     subset of the full argument passing conventions to limit complexity
     here since library functions shouldn't have many args.  */

  argvec = alloca ((nargs + 1) * sizeof (struct arg));
  memset (argvec, 0, (nargs + 1) * sizeof (struct arg));

#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
#endif

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;
  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (flags & ECF_LIBCALL_BLOCK)
    start_sequence ();
  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);
      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
	  && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
	addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
	abort ();
#endif

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   0, NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);

      count++;
    }
  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
	 must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
	abort ();

      /* There's no need to call protect_from_queue, because
	 either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
	  && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
	val = force_operand (val, NULL_RTX);
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
	{
	  rtx slot;
	  int must_copy = 1
#ifdef FUNCTION_ARG_CALLEE_COPIES
	    && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
					     NULL_TREE, 1)
#endif
	    ;

	  /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
	     functions, so we have to pretend this isn't such a function.  */
	  if (flags & ECF_LIBCALL_BLOCK)
	    {
	      rtx insns = get_insns ();
	      end_sequence ();
	      emit_insn (insns);
	    }
	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

	  /* If this was a CONST function, it is now PURE since
	     it now reads memory.  */
	  if (flags & ECF_CONST)
	    {
	      flags &= ~ECF_CONST;
	      flags |= ECF_PURE;
	    }

	  if (GET_CODE (val) == MEM && ! must_copy)
	    slot = val;
	  else if (must_copy)
	    {
	      slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
				  0, 1, 1);
	      emit_move_insn (slot, val);
	    }
	  else
	    {
	      tree type = (*lang_hooks.types.type_for_mode) (mode, 0);

	      slot
		= gen_rtx_MEM (mode,
			       expand_expr (build1 (ADDR_EXPR,
						    build_pointer_type (type),
						    make_tree (type, val)),
					    NULL_RTX, VOIDmode, 0));
	    }

	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					   gen_rtx_USE (VOIDmode, slot),
					   call_fusage);
	  if (must_copy)
	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode,
							      slot),
					     call_fusage);

	  mode = Pmode;
	  val = force_operand (XEXP (slot, 0), NULL_RTX);
	}
#endif
      argvec[count].value = val;
      argvec[count].mode = mode;

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			   1,
#else
			   argvec[count].reg != 0,
#endif
			   argvec[count].partial,
			   NULL_TREE, &args_size, &argvec[count].locate);

      if (argvec[count].locate.size.var)
	abort ();

      if (argvec[count].reg == 0 || argvec[count].partial != 0
	  || reg_parm_stack_space > 0)
	args_size.constant += argvec[count].locate.size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (((args_size.constant
			  + stack_pointer_delta
			  + STACK_BYTES - 1)
			 / STACK_BYTES
			 * STACK_BYTES)
			- stack_pointer_delta);

  args_size.constant = MAX (args_size.constant,
			    reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  args_size.constant -= reg_parm_stack_space;
#endif

  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
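
  /* A worked example of the rounding above (illustrative figures only):
     with STACK_BYTES == 16, stack_pointer_delta == 4 and 18 bytes of
     arguments, ((18 + 4 + 15) / 16) * 16 - 4 == 28, so the argument
     block is padded to 28 bytes and the stack pointer stays 16-byte
     aligned after the pushes.  */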
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
	 the evaluation of a parm to clobber something we have already
	 written to the stack.  Since most function calls on RISC machines
	 do not use the stack, this is uncommon, but must work correctly.

	 Therefore, we save any area of the stack that was already written
	 and that we are using.  Here we set up to do this by making a new
	 stack usage map from the old one.

	 Another approach might be to try to reorder the argument
	 evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      /* Since we will be writing into the entire argument area, the
	 map must be allocated for its entire size, not just the part that
	 is the responsibility of the caller.  */
      needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
					 needed);
#endif
      stack_usage_map = alloca (highest_outgoing_arg_in_use);

      if (initial_highest_arg_in_use)
	memcpy (stack_usage_map, initial_stack_usage_map,
		initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
		highest_outgoing_arg_in_use - initial_highest_arg_in_use);

      /* We must be careful to use virtual regs before they're instantiated,
	 and real regs afterwards.  Loop optimization, for example, can create
	 new libcalls after we've instantiated the virtual regs, and if we
	 use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
	argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
      else
	argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
	argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }
  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0 && PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    {
      inc = -1;
      argnum = nargs - 1;
    }
  else
    {
      inc = 1;
      argnum = 0;
    }

#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
	 may clobber it.  If the fixed area has been used for previous
	 parameters, we must save and restore it.  */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
					    &low_to_save, &high_to_save);
    }
#endif
  /* Push the args that need to be pushed.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
	{
	  if (ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If this is being stored into a pre-allocated, fixed-size,
		 stack area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
	      /* stack_slot is negative, but we want to index stack_usage_map
		 with positive values.  */
	      upper_bound = -argvec[argnum].locate.offset.constant + 1;
	      lower_bound = upper_bound - argvec[argnum].locate.size.constant;
#else
	      lower_bound = argvec[argnum].locate.offset.constant;
	      upper_bound = lower_bound + argvec[argnum].locate.size.constant;
#endif
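
	      /* Illustrative example: if the slot's offset.constant is -8
		 and its size.constant is 4, the downward-growing case
		 above gives upper_bound = 9 and lower_bound = 5, so bytes
		 5..8 of stack_usage_map cover the slot.  */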
	      i = lower_bound;
	      /* Don't worry about things in the fixed argument area;
		 it has already been saved.  */
	      if (i < reg_parm_stack_space)
		i = reg_parm_stack_space;
	      while (i < upper_bound && stack_usage_map[i] == 0)
		i++;

	      if (i < upper_bound)
		{
		  /* We need to make a save area.  */
		  unsigned int size
		    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
		  enum machine_mode save_mode
		    = mode_for_size (size, MODE_INT, 1);
		  rtx adr
		    = plus_constant (argblock,
				     argvec[argnum].locate.offset.constant);
		  rtx stack_area
		    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));

		  if (save_mode == BLKmode)
		    {
		      argvec[argnum].save_area
			= assign_stack_temp (BLKmode,
					     argvec[argnum].locate.size.constant,
					     0);

		      emit_block_move (validize_mem (argvec[argnum].save_area),
				       stack_area,
				       GEN_INT (argvec[argnum].locate.size.constant),
				       BLOCK_OP_CALL_PARM);
		    }
		  else
		    {
		      argvec[argnum].save_area = gen_reg_rtx (save_mode);

		      emit_move_insn (argvec[argnum].save_area, stack_area);
		    }
		}
	    }

	  emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
			  partial, reg, 0, argblock,
			  GEN_INT (argvec[argnum].locate.offset.constant),
			  reg_parm_stack_space,
			  ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));

	  /* Now mark the segment we just used.  */
	  if (ACCUMULATE_OUTGOING_ARGS)
	    for (i = lower_bound; i < upper_bound; i++)
	      stack_usage_map[i] = 1;

	  NO_DEFER_POP;
	}
    }
  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
				- original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
      else if (reg != 0 && partial == 0)
	emit_move_insn (reg, val);

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
	use_group_regs (&call_fusage, reg);
      else if (reg != 0)
	use_reg (&call_fusage, reg);
    }
  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value,
		      force_reg (Pmode,
				 force_operand (XEXP (mem_value, 0),
						NULL_RTX)));
      if (GET_CODE (struct_value) == REG)
	use_reg (&call_fusage, struct_value);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
	    ? hard_libcall_value (outmode) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
    abort ();
  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun,
	       get_identifier (XSTR (orgfun, 0)),
	       build_function_type (tfom, NULL_TREE),
	       original_args_size.constant, args_size.constant,
	       struct_value_size,
	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
	       valreg,
	       old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */

  if (flags & (ECF_NORETURN | ECF_LONGJMP))
    {
      /* The barrier note must be emitted
	 immediately after the CALL_INSN.  Some ports emit more than
	 just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (GET_CODE (last) != CALL_INSN)
	{
	  last = PREV_INSN (last);
	  /* There was no CALL_INSN?  */
	  if (last == before_call)
	    abort ();
	}

      emit_barrier_after (last);
    }
  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
	{
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	}
      else
	{
	  rtx note = 0;
	  rtx temp;
	  int i;

	  if (GET_CODE (valreg) == PARALLEL)
	    {
	      temp = gen_reg_rtx (outmode);
	      emit_group_store (temp, valreg, NULL_TREE,
				GET_MODE_SIZE (outmode));
	      valreg = temp;
	    }

	  temp = gen_reg_rtx (GET_MODE (valreg));

	  /* Construct an "equal form" for the value which mentions all the
	     arguments in order as well as the function name.  */
	  for (i = 0; i < nargs; i++)
	    note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
	  note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);

	  insns = get_insns ();
	  end_sequence ();

	  if (flags & ECF_PURE)
	    note = gen_rtx_EXPR_LIST (VOIDmode,
				      gen_rtx_USE (VOIDmode,
						   gen_rtx_MEM (BLKmode,
								gen_rtx_SCRATCH (VOIDmode))),
				      note);

	  emit_libcall_block (insns, temp, valreg, note);

	  valreg = temp;
	}
    }
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
	{
	  if (value == 0)
	    value = mem_value;
	  if (value != mem_value)
	    emit_move_insn (value, mem_value);
	}
      else if (GET_CODE (valreg) == PARALLEL)
	{
	  if (value == 0)
	    value = gen_reg_rtx (outmode);
	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
	}
      else if (value != 0)
	emit_move_insn (value, valreg);
      else
	value = valreg;
    }
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
	restore_fixed_argument_area (save_area, argblock,
				     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
	if (argvec[count].save_area)
	  {
	    enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
	    rtx adr = plus_constant (argblock,
				     argvec[count].locate.offset.constant);
	    rtx stack_area = gen_rtx_MEM (save_mode,
					  memory_address (save_mode, adr));

	    if (save_mode == BLKmode)
	      emit_block_move (stack_area,
			       validize_mem (argvec[count].save_area),
			       GEN_INT (argvec[count].locate.size.constant),
			       BLOCK_OP_CALL_PARM);
	    else
	      emit_move_insn (stack_area, argvec[count].save_area);
	  }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue
   already.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
   or some other LCT_ value for other types of library calls.  */

void
emit_library_call (rtx orgfun, enum libcall_type fn_type,
		   enum machine_mode outmode, int nargs, ...)
{
  va_list p;

  va_start (p, nargs);
  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
  va_end (p);
}
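
/* Illustrative usage sketch (an editor's example, not from the original
   sources; LIBFUNC, OP0 and OP1 are hypothetical): a call to a
   void-returning helper taking two SImode operands could be emitted as

       emit_library_call (libfunc, LCT_NORMAL, VOIDmode, 2,
			  op0, SImode, op1, SImode);

   Each argument rtx is followed by the machine mode it is passed in,
   matching the va_arg pairs read in emit_library_call_value_1.  */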
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value (rtx orgfun, rtx value,
			 enum libcall_type fn_type,
			 enum machine_mode outmode, int nargs, ...)
{
  rtx result;
  va_list p;

  va_start (p, nargs);
  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
				      nargs, p);
  va_end (p);

  return result;
}
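
/* Illustrative sketch of a value-returning use (again hypothetical
   names): a 32-bit multiply routine might be expanded as

       rtx res = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
					  SImode, 2,
					  op0, SImode, op1, SImode);

   after which RES holds the rtx where the result can be found.  */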
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS with ECF_MAY_BE_ALLOCA set says this could be a call to `alloca',
   so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */

static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;
  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
#ifdef ARGS_GROW_DOWNWARD
	  /* stack_slot is negative, but we want to index stack_usage_map
	     with positive values.  */
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
	  else
	    upper_bound = 0;

	  lower_bound = upper_bound - arg->locate.size.constant;
#else
	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
	    lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
	  else
	    lower_bound = 0;

	  upper_bound = lower_bound + arg->locate.size.constant;
#endif

	  i = lower_bound;
	  /* Don't worry about things in the fixed argument area;
	     it has already been saved.  */
	  if (i < reg_parm_stack_space)
	    i = reg_parm_stack_space;
	  while (i < upper_bound && stack_usage_map[i] == 0)
	    i++;

	  if (i < upper_bound)
	    {
	      /* We need to make a save area.  */
	      unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
	      enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  tree ot = TREE_TYPE (arg->tree_value);
		  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
						       | TYPE_QUAL_CONST));

		  arg->save_area = assign_temp (nt, 0, 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (arg->save_area), stack_area,
				   expr_size (arg->tree_value),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  if (reg != 0 && partial == 0)
    /* Being passed entirely in a register.  We shouldn't be called in
       this case.  */
    abort ();

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;
  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting object (or for any other reason) the mode
	 doesn't agree, convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }
  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
		 / (PARM_BOUNDARY / BITS_PER_UNIT))
		* (PARM_BOUNDARY / BITS_PER_UNIT));

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
		      PARM_BOUNDARY, partial, reg, used - size, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
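  /* Worked example of the rounding above (illustrative figures): for an
     HImode scalar, size = 2; with PARM_BOUNDARY == 32 (4 bytes),
     used = ((2 + 3) / 4) * 4 = 4, so emit_push_insn is told to skip
     used - size = 2 bytes of padding within the slot.  */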
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because
	     emit_push_insn for BLKmode is careful to avoid it.  */
	  if (reg && GET_CODE (reg) == PARALLEL)
	    {
	      /* Use the size of the elt to compute excess.  */
	      rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
	      excess = (arg->locate.size.constant
			- int_size_in_bytes (TREE_TYPE (pval))
			+ partial * GET_MODE_SIZE (GET_MODE (elt)));
	    }
	  else
	    excess = (arg->locate.size.constant
		      - int_size_in_bytes (TREE_TYPE (pval))
		      + partial * UNITS_PER_WORD);
	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype), 0);
	}

      /* Some types will require stricter alignment, which will be
	 provided for elsewhere in argument layout.  */
      parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else if (excess)
	    {
	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
	      parm_align = MIN (parm_align, excess_align);
	    }
	}
      if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  int i = 0;

	  if (XEXP (x, 0) == current_function_internal_arg_pointer
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) ==
		     current_function_internal_arg_pointer
		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
	    {
	      if (XEXP (x, 0) != current_function_internal_arg_pointer)
		i = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* expand_call should ensure this.  */
	      if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
		abort ();

	      if (arg->locate.offset.constant > i)
		{
		  if (arg->locate.offset.constant < i + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	      else if (arg->locate.offset.constant < i)
		{
		  if (i < arg->locate.offset.constant + INTVAL (size_rtx))
		    sibcall_failure = 1;
		}
	    }
	}
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
		      parm_align, partial, reg, excess, argblock,
		      ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
		      ARGS_SIZE_RTX (arg->locate.alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* ANSI doesn't require a sequence point here,
     but PCC has one, so this will avoid some problems.  */
  emit_queue ();

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.
   We cannot do so in the following cases:

   - if the type has variable size
   - if the type is marked as addressable (it is required to be constructed
     into the stack)
   - if the padding and mode of the type is such that a copy into a register
     would put it into the wrong part of the register.

   Which padding can't be supported depends on the byte endianness.

   A value in a register is implicitly padded at the most significant end.
   On a big-endian machine, that is the lower end in memory.
   So a value padded in memory at the upper end can't go in a register.
   For a little-endian machine, the reverse is true.  */

bool
default_must_pass_in_stack (enum machine_mode mode, tree type)
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type is such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
	  == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
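
/* Illustrative example (an editor's sketch, not from the original
   sources): on a big-endian 32-bit target, a 3-byte BLKmode struct that
   is padded upward in memory would land in the wrong end of a register
   if copied directly, so the check above forces it onto the stack.  */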