1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
30 #include "insn-flags.h"
36 #ifndef ACCUMULATE_OUTGOING_ARGS
37 #define ACCUMULATE_OUTGOING_ARGS 0
40 /* Supply a default definition for PUSH_ARGS. */
43 #define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
49 #if !defined FUNCTION_OK_FOR_SIBCALL
50 #define FUNCTION_OK_FOR_SIBCALL(DECL) 1
53 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
54 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED PUSH_ARGS
71 #ifndef PUSH_ARGS_REVERSED
72 #define PUSH_ARGS_REVERSED 0
75 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
76 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
78 /* Data structure and subroutines used within expand_call. */
82 /* Tree node for this argument. */
84 /* Mode for value; TYPE_MODE unless promoted. */
85 enum machine_mode mode
;
86 /* Current RTL value for argument, or 0 if it isn't precomputed. */
88 /* Initially-compute RTL value for argument; only for const functions. */
90 /* Register to pass this argument in, 0 if passed on stack, or an
91 PARALLEL if the arg is to be copied into multiple non-contiguous
94 /* Register to pass this argument in when generating tail call sequence.
95 This is not the same register as for normal calls on machines with
98 /* If REG was promoted from the actual mode of the argument expression,
99 indicates whether the promotion is sign- or zero-extended. */
101 /* Number of registers to use. 0 means put the whole arg in registers.
102 Also 0 if not passed in registers. */
104 /* Non-zero if argument must be passed on stack.
105 Note that some arguments may be passed on the stack
106 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
107 pass_on_stack identifies arguments that *cannot* go in registers. */
109 /* Offset of this argument from beginning of stack-args. */
110 struct args_size offset
;
111 /* Similar, but offset to the start of the stack slot. Different from
112 OFFSET if this arg pads downward. */
113 struct args_size slot_offset
;
114 /* Size of this argument on the stack, rounded up for any padding it gets,
115 parts of the argument passed in registers do not count.
116 If REG_PARM_STACK_SPACE is defined, then register parms
117 are counted here as well. */
118 struct args_size size
;
119 /* Location on the stack at which parameter should be stored. The store
120 has already been done if STACK == VALUE. */
122 /* Location on the stack of the start of this argument slot. This can
123 differ from STACK if this arg pads downward. This location is known
124 to be aligned to FUNCTION_ARG_BOUNDARY. */
126 /* Place that this stack area has been saved, if needed. */
128 /* If an argument's alignment does not permit direct copying into registers,
129 copy in smaller-sized pieces into pseudos. These are stored in a
130 block pointed to by this field. The next field says how many
131 word-sized pseudos we made. */
134 /* The amount that the stack pointer needs to be adjusted to
135 force alignment for the next argument. */
136 struct args_size alignment_pad
;
139 /* A vector of one char per byte of stack space. A byte if non-zero if
140 the corresponding stack location has been used.
141 This vector is used to prevent a function call within an argument from
142 clobbering any stack already set up. */
143 static char *stack_usage_map
;
145 /* Size of STACK_USAGE_MAP. */
146 static int highest_outgoing_arg_in_use
;
148 /* stack_arg_under_construction is nonzero when an argument may be
149 initialized with a constructor call (including a C function that
150 returns a BLKmode struct) and expand_call must take special action
151 to make sure the object being constructed does not overlap the
152 argument list for the constructor call. */
153 int stack_arg_under_construction
;
155 static int calls_function
PARAMS ((tree
, int));
156 static int calls_function_1
PARAMS ((tree
, int));
158 /* Nonzero if this is a call to a `const' function. */
160 /* Nonzero if this is a call to a `volatile' function. */
161 #define ECF_NORETURN 2
162 /* Nonzero if this is a call to malloc or a related function. */
164 /* Nonzero if it is plausible that this is a call to alloca. */
165 #define ECF_MAY_BE_ALLOCA 8
166 /* Nonzero if this is a call to a function that won't throw an exception. */
167 #define ECF_NOTHROW 16
168 /* Nonzero if this is a call to setjmp or a related function. */
169 #define ECF_RETURNS_TWICE 32
170 /* Nonzero if this is a call to `longjmp'. */
171 #define ECF_LONGJMP 64
172 /* Nonzero if this is a syscall that makes a new process in the image of
174 #define ECF_FORK_OR_EXEC 128
175 #define ECF_SIBCALL 256
176 /* Nonzero if this is a call to "pure" function (like const function,
177 but may read memory. */
180 static void emit_call_1
PARAMS ((rtx
, tree
, tree
, HOST_WIDE_INT
,
181 HOST_WIDE_INT
, HOST_WIDE_INT
, rtx
,
182 rtx
, int, rtx
, int));
183 static void precompute_register_parameters
PARAMS ((int,
186 static void store_one_arg
PARAMS ((struct arg_data
*, rtx
, int, int,
188 static void store_unaligned_arguments_into_pseudos
PARAMS ((struct arg_data
*,
190 static int finalize_must_preallocate
PARAMS ((int, int,
192 struct args_size
*));
193 static void precompute_arguments
PARAMS ((int, int,
195 static int compute_argument_block_size
PARAMS ((int,
198 static void initialize_argument_information
PARAMS ((int,
205 static void compute_argument_addresses
PARAMS ((struct arg_data
*,
207 static rtx rtx_for_function_call
PARAMS ((tree
, tree
));
208 static void load_register_parameters
PARAMS ((struct arg_data
*,
210 static int libfunc_nothrow
PARAMS ((rtx
));
211 static rtx emit_library_call_value_1
PARAMS ((int, rtx
, rtx
, int,
214 static int special_function_p
PARAMS ((tree
, int));
215 static int flags_from_decl_or_type
PARAMS ((tree
));
216 static rtx try_to_integrate
PARAMS ((tree
, tree
, rtx
,
218 static int combine_pending_stack_adjustment_and_call
219 PARAMS ((int, struct args_size
*, int));
221 #ifdef REG_PARM_STACK_SPACE
222 static rtx save_fixed_argument_area
PARAMS ((int, rtx
, int *, int *));
223 static void restore_fixed_argument_area
PARAMS ((rtx
, rtx
, int, int));
226 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
229 If WHICH is 0, return 1 if EXP contains a call to any function.
230 Actually, we only need return 1 if evaluating EXP would require pushing
231 arguments on the stack, but that is too difficult to compute, so we just
232 assume any function call might require the stack. */
234 static tree calls_function_save_exprs
;
237 calls_function (exp
, which
)
243 calls_function_save_exprs
= 0;
244 val
= calls_function_1 (exp
, which
);
245 calls_function_save_exprs
= 0;
249 /* Recursive function to do the work of above function. */
252 calls_function_1 (exp
, which
)
257 enum tree_code code
= TREE_CODE (exp
);
258 int class = TREE_CODE_CLASS (code
);
259 int length
= first_rtl_op (code
);
261 /* If this code is language-specific, we don't know what it will do. */
262 if ((int) code
>= NUM_TREE_CODES
)
270 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
271 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
273 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0),
275 & ECF_MAY_BE_ALLOCA
))
281 if (SAVE_EXPR_RTL (exp
) != 0)
283 if (value_member (exp
, calls_function_save_exprs
))
285 calls_function_save_exprs
= tree_cons (NULL_TREE
, exp
,
286 calls_function_save_exprs
);
287 return (TREE_OPERAND (exp
, 0) != 0
288 && calls_function_1 (TREE_OPERAND (exp
, 0), which
));
293 register tree subblock
;
295 for (local
= BLOCK_VARS (exp
); local
; local
= TREE_CHAIN (local
))
296 if (DECL_INITIAL (local
) != 0
297 && calls_function_1 (DECL_INITIAL (local
), which
))
300 for (subblock
= BLOCK_SUBBLOCKS (exp
);
302 subblock
= TREE_CHAIN (subblock
))
303 if (calls_function_1 (subblock
, which
))
309 for (; exp
!= 0; exp
= TREE_CHAIN (exp
))
310 if (calls_function_1 (TREE_VALUE (exp
), which
))
318 /* Only expressions, references, and blocks can contain calls. */
319 if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
322 for (i
= 0; i
< length
; i
++)
323 if (TREE_OPERAND (exp
, i
) != 0
324 && calls_function_1 (TREE_OPERAND (exp
, i
), which
))
330 /* Force FUNEXP into a form suitable for the address of a CALL,
331 and return that as an rtx. Also load the static chain register
332 if FNDECL is a nested function.
334 CALL_FUSAGE points to a variable holding the prospective
335 CALL_INSN_FUNCTION_USAGE information. */
338 prepare_call_address (funexp
, fndecl
, call_fusage
, reg_parm_seen
)
344 rtx static_chain_value
= 0;
346 funexp
= protect_from_queue (funexp
, 0);
349 /* Get possible static chain value for nested function in C. */
350 static_chain_value
= lookup_static_chain (fndecl
);
352 /* Make a valid memory address and copy constants thru pseudo-regs,
353 but not for a constant address if -fno-function-cse. */
354 if (GET_CODE (funexp
) != SYMBOL_REF
)
355 /* If we are using registers for parameters, force the
356 function address into a register now. */
357 funexp
= ((SMALL_REGISTER_CLASSES
&& reg_parm_seen
)
358 ? force_not_mem (memory_address (FUNCTION_MODE
, funexp
))
359 : memory_address (FUNCTION_MODE
, funexp
));
362 #ifndef NO_FUNCTION_CSE
363 if (optimize
&& ! flag_no_function_cse
)
364 #ifdef NO_RECURSIVE_FUNCTION_CSE
365 if (fndecl
!= current_function_decl
)
367 funexp
= force_reg (Pmode
, funexp
);
371 if (static_chain_value
!= 0)
373 emit_move_insn (static_chain_rtx
, static_chain_value
);
375 if (GET_CODE (static_chain_rtx
) == REG
)
376 use_reg (call_fusage
, static_chain_rtx
);
382 /* Generate instructions to call function FUNEXP,
383 and optionally pop the results.
384 The CALL_INSN is the first insn generated.
386 FNDECL is the declaration node of the function. This is given to the
387 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
389 FUNTYPE is the data type of the function. This is given to the macro
390 RETURN_POPS_ARGS to determine whether this function pops its own args.
391 We used to allow an identifier for library functions, but that doesn't
392 work when the return type is an aggregate type and the calling convention
393 says that the pointer to this aggregate is to be popped by the callee.
395 STACK_SIZE is the number of bytes of arguments on the stack,
396 ROUNDED_STACK_SIZE is that number rounded up to
397 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
398 both to put into the call insn and to generate explicit popping
401 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
402 It is zero if this call doesn't want a structure value.
404 NEXT_ARG_REG is the rtx that results from executing
405 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
406 just after all the args have had their registers assigned.
407 This could be whatever you like, but normally it is the first
408 arg-register beyond those used for args in this call,
409 or 0 if all the arg-registers are used in this call.
410 It is passed on to `gen_call' so you can put this info in the call insn.
412 VALREG is a hard register in which a value is returned,
413 or 0 if the call does not return a value.
415 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
416 the args to this call were processed.
417 We restore `inhibit_defer_pop' to that value.
419 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
420 denote registers used by the called function. */
423 emit_call_1 (funexp
, fndecl
, funtype
, stack_size
, rounded_stack_size
,
424 struct_value_size
, next_arg_reg
, valreg
, old_inhibit_defer_pop
,
425 call_fusage
, ecf_flags
)
427 tree fndecl ATTRIBUTE_UNUSED
;
428 tree funtype ATTRIBUTE_UNUSED
;
429 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED
;
430 HOST_WIDE_INT rounded_stack_size
;
431 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED
;
434 int old_inhibit_defer_pop
;
438 rtx rounded_stack_size_rtx
= GEN_INT (rounded_stack_size
);
440 int already_popped
= 0;
441 HOST_WIDE_INT n_popped
= RETURN_POPS_ARGS (fndecl
, funtype
, stack_size
);
442 #if defined (HAVE_call) && defined (HAVE_call_value)
443 rtx struct_value_size_rtx
;
444 struct_value_size_rtx
= GEN_INT (struct_value_size
);
447 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
448 and we don't want to load it into a register as an optimization,
449 because prepare_call_address already did it if it should be done. */
450 if (GET_CODE (funexp
) != SYMBOL_REF
)
451 funexp
= memory_address (FUNCTION_MODE
, funexp
);
453 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
454 if ((ecf_flags
& ECF_SIBCALL
)
455 && HAVE_sibcall_pop
&& HAVE_sibcall_value_pop
456 && (RETURN_POPS_ARGS (fndecl
, funtype
, stack_size
) > 0
459 rtx n_pop
= GEN_INT (RETURN_POPS_ARGS (fndecl
, funtype
, stack_size
));
462 /* If this subroutine pops its own args, record that in the call insn
463 if possible, for the sake of frame pointer elimination. */
466 pat
= GEN_SIBCALL_VALUE_POP (valreg
,
467 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
468 rounded_stack_size_rtx
, next_arg_reg
,
471 pat
= GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
472 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
474 emit_call_insn (pat
);
480 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
481 /* If the target has "call" or "call_value" insns, then prefer them
482 if no arguments are actually popped. If the target does not have
483 "call" or "call_value" insns, then we must use the popping versions
484 even if the call has no arguments to pop. */
485 #if defined (HAVE_call) && defined (HAVE_call_value)
486 if (HAVE_call
&& HAVE_call_value
&& HAVE_call_pop
&& HAVE_call_value_pop
489 if (HAVE_call_pop
&& HAVE_call_value_pop
)
492 rtx n_pop
= GEN_INT (n_popped
);
495 /* If this subroutine pops its own args, record that in the call insn
496 if possible, for the sake of frame pointer elimination. */
499 pat
= GEN_CALL_VALUE_POP (valreg
,
500 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
501 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
503 pat
= GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
504 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
506 emit_call_insn (pat
);
512 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
513 if ((ecf_flags
& ECF_SIBCALL
)
514 && HAVE_sibcall
&& HAVE_sibcall_value
)
517 emit_call_insn (GEN_SIBCALL_VALUE (valreg
,
518 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
519 rounded_stack_size_rtx
,
520 next_arg_reg
, NULL_RTX
));
522 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
523 rounded_stack_size_rtx
, next_arg_reg
,
524 struct_value_size_rtx
));
529 #if defined (HAVE_call) && defined (HAVE_call_value)
530 if (HAVE_call
&& HAVE_call_value
)
533 emit_call_insn (GEN_CALL_VALUE (valreg
,
534 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
535 rounded_stack_size_rtx
, next_arg_reg
,
538 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
539 rounded_stack_size_rtx
, next_arg_reg
,
540 struct_value_size_rtx
));
546 /* Find the CALL insn we just emitted. */
547 for (call_insn
= get_last_insn ();
548 call_insn
&& GET_CODE (call_insn
) != CALL_INSN
;
549 call_insn
= PREV_INSN (call_insn
))
555 /* Mark memory as used for "pure" function call. */
556 if (ecf_flags
& ECF_PURE
)
558 call_fusage
= gen_rtx_EXPR_LIST (VOIDmode
,
559 gen_rtx_USE (VOIDmode
,
560 gen_rtx_MEM (BLKmode
,
561 gen_rtx_SCRATCH (VOIDmode
))), call_fusage
);
564 /* Put the register usage information on the CALL. If there is already
565 some usage information, put ours at the end. */
566 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
570 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
571 link
= XEXP (link
, 1))
574 XEXP (link
, 1) = call_fusage
;
577 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
579 /* If this is a const call, then set the insn's unchanging bit. */
580 if (ecf_flags
& (ECF_CONST
| ECF_PURE
))
581 CONST_CALL_P (call_insn
) = 1;
583 /* If this call can't throw, attach a REG_EH_REGION reg note to that
585 if (ecf_flags
& ECF_NOTHROW
)
586 REG_NOTES (call_insn
) = gen_rtx_EXPR_LIST (REG_EH_REGION
, const0_rtx
,
587 REG_NOTES (call_insn
));
589 SIBLING_CALL_P (call_insn
) = ((ecf_flags
& ECF_SIBCALL
) != 0);
591 /* Restore this now, so that we do defer pops for this call's args
592 if the context of the call as a whole permits. */
593 inhibit_defer_pop
= old_inhibit_defer_pop
;
598 CALL_INSN_FUNCTION_USAGE (call_insn
)
599 = gen_rtx_EXPR_LIST (VOIDmode
,
600 gen_rtx_CLOBBER (VOIDmode
, stack_pointer_rtx
),
601 CALL_INSN_FUNCTION_USAGE (call_insn
));
602 rounded_stack_size
-= n_popped
;
603 rounded_stack_size_rtx
= GEN_INT (rounded_stack_size
);
604 stack_pointer_delta
-= n_popped
;
607 if (!ACCUMULATE_OUTGOING_ARGS
)
609 /* If returning from the subroutine does not automatically pop the args,
610 we need an instruction to pop them sooner or later.
611 Perhaps do it now; perhaps just record how much space to pop later.
613 If returning from the subroutine does pop the args, indicate that the
614 stack pointer will be changed. */
616 if (rounded_stack_size
!= 0)
618 if (flag_defer_pop
&& inhibit_defer_pop
== 0
619 && !(ecf_flags
& (ECF_CONST
| ECF_PURE
)))
620 pending_stack_adjust
+= rounded_stack_size
;
622 adjust_stack (rounded_stack_size_rtx
);
625 /* When we accumulate outgoing args, we must avoid any stack manipulations.
626 Restore the stack pointer to its original value now. Usually
627 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
628 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
629 popping variants of functions exist as well.
631 ??? We may optimize similar to defer_pop above, but it is
632 probably not worthwhile.
634 ??? It will be worthwhile to enable combine_stack_adjustments even for
637 anti_adjust_stack (GEN_INT (n_popped
));
640 /* Determine if the function identified by NAME and FNDECL is one with
641 special properties we wish to know about.
643 For example, if the function might return more than one time (setjmp), then
644 set RETURNS_TWICE to a nonzero value.
646 Similarly set LONGJMP for if the function is in the longjmp family.
648 Set MALLOC for any of the standard memory allocation functions which
649 allocate from the heap.
651 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
652 space from the stack such as alloca. */
655 special_function_p (fndecl
, flags
)
659 if (! (flags
& ECF_MALLOC
)
660 && fndecl
&& DECL_NAME (fndecl
)
661 && IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) <= 17
662 /* Exclude functions not at the file scope, or not `extern',
663 since they are not the magic functions we would otherwise
665 && DECL_CONTEXT (fndecl
) == NULL_TREE
&& TREE_PUBLIC (fndecl
))
667 char *name
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
670 /* We assume that alloca will always be called by name. It
671 makes no sense to pass it as a pointer-to-function to
672 anything that does not understand its behavior. */
673 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) == 6
675 && ! strcmp (name
, "alloca"))
676 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) == 16
678 && ! strcmp (name
, "__builtin_alloca"))))
679 flags
|= ECF_MAY_BE_ALLOCA
;
681 /* Disregard prefix _, __ or __x. */
684 if (name
[1] == '_' && name
[2] == 'x')
686 else if (name
[1] == '_')
695 && (! strcmp (tname
, "setjmp")
696 || ! strcmp (tname
, "setjmp_syscall")))
698 && ! strcmp (tname
, "sigsetjmp"))
700 && ! strcmp (tname
, "savectx")))
701 flags
|= ECF_RETURNS_TWICE
;
704 && ! strcmp (tname
, "siglongjmp"))
705 flags
|= ECF_LONGJMP
;
707 else if ((tname
[0] == 'q' && tname
[1] == 's'
708 && ! strcmp (tname
, "qsetjmp"))
709 || (tname
[0] == 'v' && tname
[1] == 'f'
710 && ! strcmp (tname
, "vfork")))
711 flags
|= ECF_RETURNS_TWICE
;
713 else if (tname
[0] == 'l' && tname
[1] == 'o'
714 && ! strcmp (tname
, "longjmp"))
715 flags
|= ECF_LONGJMP
;
717 else if ((tname
[0] == 'f' && tname
[1] == 'o'
718 && ! strcmp (tname
, "fork"))
719 /* Linux specific: __clone. check NAME to insist on the
720 leading underscores, to avoid polluting the ISO / POSIX
722 || (name
[0] == '_' && name
[1] == '_'
723 && ! strcmp (tname
, "clone"))
724 || (tname
[0] == 'e' && tname
[1] == 'x' && tname
[2] == 'e'
725 && tname
[3] == 'c' && (tname
[4] == 'l' || tname
[4] == 'v')
727 || ((tname
[5] == 'p' || tname
[5] == 'e')
728 && tname
[6] == '\0'))))
729 flags
|= ECF_FORK_OR_EXEC
;
731 /* Do not add any more malloc-like functions to this list,
732 instead mark them as malloc functions using the malloc attribute.
733 Note, realloc is not suitable for attribute malloc since
734 it may return the same address across multiple calls.
735 C++ operator new is not suitable because it is not required
736 to return a unique pointer; indeed, the standard placement new
737 just returns its argument. */
738 else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl
))) == Pmode
739 && (! strcmp (tname
, "malloc")
740 || ! strcmp (tname
, "calloc")
741 || ! strcmp (tname
, "strdup")))
747 /* Return nonzero when tree represent call to longjmp. */
749 setjmp_call_p (fndecl
)
752 return special_function_p (fndecl
, 0) & ECF_RETURNS_TWICE
;
755 /* Detect flags (function attributes) from the function type node. */
757 flags_from_decl_or_type (exp
)
761 /* ??? We can't set IS_MALLOC for function types? */
764 /* The function exp may have the `malloc' attribute. */
765 if (DECL_P (exp
) && DECL_IS_MALLOC (exp
))
768 /* The function exp may have the `pure' attribute. */
769 if (DECL_P (exp
) && DECL_IS_PURE (exp
))
772 if (TREE_NOTHROW (exp
))
773 flags
|= ECF_NOTHROW
;
776 if (TREE_READONLY (exp
) && !TREE_THIS_VOLATILE (exp
))
779 if (TREE_THIS_VOLATILE (exp
))
780 flags
|= ECF_NORETURN
;
786 /* Precompute all register parameters as described by ARGS, storing values
787 into fields within the ARGS array.
789 NUM_ACTUALS indicates the total number elements in the ARGS array.
791 Set REG_PARM_SEEN if we encounter a register parameter. */
794 precompute_register_parameters (num_actuals
, args
, reg_parm_seen
)
796 struct arg_data
*args
;
803 for (i
= 0; i
< num_actuals
; i
++)
804 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
)
808 if (args
[i
].value
== 0)
811 args
[i
].value
= expand_expr (args
[i
].tree_value
, NULL_RTX
,
813 preserve_temp_slots (args
[i
].value
);
816 /* ANSI doesn't require a sequence point here,
817 but PCC has one, so this will avoid some problems. */
821 /* If we are to promote the function arg to a wider mode,
824 if (args
[i
].mode
!= TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)))
826 = convert_modes (args
[i
].mode
,
827 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
828 args
[i
].value
, args
[i
].unsignedp
);
830 /* If the value is expensive, and we are inside an appropriately
831 short loop, put the value into a pseudo and then put the pseudo
834 For small register classes, also do this if this call uses
835 register parameters. This is to avoid reload conflicts while
836 loading the parameters registers. */
838 if ((! (GET_CODE (args
[i
].value
) == REG
839 || (GET_CODE (args
[i
].value
) == SUBREG
840 && GET_CODE (SUBREG_REG (args
[i
].value
)) == REG
)))
841 && args
[i
].mode
!= BLKmode
842 && rtx_cost (args
[i
].value
, SET
) > 2
843 && ((SMALL_REGISTER_CLASSES
&& *reg_parm_seen
)
844 || preserve_subexpressions_p ()))
845 args
[i
].value
= copy_to_mode_reg (args
[i
].mode
, args
[i
].value
);
849 #ifdef REG_PARM_STACK_SPACE
851 /* The argument list is the property of the called routine and it
852 may clobber it. If the fixed area has been used for previous
853 parameters, we must save and restore it. */
856 save_fixed_argument_area (reg_parm_stack_space
, argblock
,
857 low_to_save
, high_to_save
)
858 int reg_parm_stack_space
;
864 rtx save_area
= NULL_RTX
;
866 /* Compute the boundary of the that needs to be saved, if any. */
867 #ifdef ARGS_GROW_DOWNWARD
868 for (i
= 0; i
< reg_parm_stack_space
+ 1; i
++)
870 for (i
= 0; i
< reg_parm_stack_space
; i
++)
873 if (i
>= highest_outgoing_arg_in_use
874 || stack_usage_map
[i
] == 0)
877 if (*low_to_save
== -1)
883 if (*low_to_save
>= 0)
885 int num_to_save
= *high_to_save
- *low_to_save
+ 1;
886 enum machine_mode save_mode
887 = mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
890 /* If we don't have the required alignment, must do this in BLKmode. */
891 if ((*low_to_save
& (MIN (GET_MODE_SIZE (save_mode
),
892 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
895 #ifdef ARGS_GROW_DOWNWARD
897 = gen_rtx_MEM (save_mode
,
898 memory_address (save_mode
,
899 plus_constant (argblock
,
902 stack_area
= gen_rtx_MEM (save_mode
,
903 memory_address (save_mode
,
904 plus_constant (argblock
,
907 if (save_mode
== BLKmode
)
909 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
910 /* Cannot use emit_block_move here because it can be done by a
911 library call which in turn gets into this place again and deadly
912 infinite recursion happens. */
913 move_by_pieces (validize_mem (save_area
), stack_area
, num_to_save
,
918 save_area
= gen_reg_rtx (save_mode
);
919 emit_move_insn (save_area
, stack_area
);
926 restore_fixed_argument_area (save_area
, argblock
, high_to_save
, low_to_save
)
932 enum machine_mode save_mode
= GET_MODE (save_area
);
933 #ifdef ARGS_GROW_DOWNWARD
935 = gen_rtx_MEM (save_mode
,
936 memory_address (save_mode
,
937 plus_constant (argblock
,
941 = gen_rtx_MEM (save_mode
,
942 memory_address (save_mode
,
943 plus_constant (argblock
,
947 if (save_mode
!= BLKmode
)
948 emit_move_insn (stack_area
, save_area
);
950 /* Cannot use emit_block_move here because it can be done by a library
951 call which in turn gets into this place again and deadly infinite
952 recursion happens. */
953 move_by_pieces (stack_area
, validize_mem (save_area
),
954 high_to_save
- low_to_save
+ 1, PARM_BOUNDARY
);
958 /* If any elements in ARGS refer to parameters that are to be passed in
959 registers, but not in memory, and whose alignment does not permit a
960 direct copy into registers. Copy the values into a group of pseudos
961 which we will later copy into the appropriate hard registers.
963 Pseudos for each unaligned argument will be stored into the array
964 args[argnum].aligned_regs. The caller is responsible for deallocating
965 the aligned_regs array if it is nonzero. */
968 store_unaligned_arguments_into_pseudos (args
, num_actuals
)
969 struct arg_data
*args
;
974 for (i
= 0; i
< num_actuals
; i
++)
975 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
976 && args
[i
].mode
== BLKmode
977 && (TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
))
978 < (unsigned int) MIN (BIGGEST_ALIGNMENT
, BITS_PER_WORD
)))
980 int bytes
= int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
981 int big_endian_correction
= 0;
983 args
[i
].n_aligned_regs
984 = args
[i
].partial
? args
[i
].partial
985 : (bytes
+ (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
987 args
[i
].aligned_regs
= (rtx
*) xmalloc (sizeof (rtx
)
988 * args
[i
].n_aligned_regs
);
990 /* Structures smaller than a word are aligned to the least
991 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
992 this means we must skip the empty high order bytes when
993 calculating the bit offset. */
994 if (BYTES_BIG_ENDIAN
&& bytes
< UNITS_PER_WORD
)
995 big_endian_correction
= (BITS_PER_WORD
- (bytes
* BITS_PER_UNIT
));
997 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
999 rtx reg
= gen_reg_rtx (word_mode
);
1000 rtx word
= operand_subword_force (args
[i
].value
, j
, BLKmode
);
1001 int bitsize
= MIN (bytes
* BITS_PER_UNIT
, BITS_PER_WORD
);
1002 int bitalign
= TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
));
1004 args
[i
].aligned_regs
[j
] = reg
;
1006 /* There is no need to restrict this code to loading items
1007 in TYPE_ALIGN sized hunks. The bitfield instructions can
1008 load up entire word sized registers efficiently.
1010 ??? This may not be needed anymore.
1011 We use to emit a clobber here but that doesn't let later
1012 passes optimize the instructions we emit. By storing 0 into
1013 the register later passes know the first AND to zero out the
1014 bitfield being set in the register is unnecessary. The store
1015 of 0 will be deleted as will at least the first AND. */
1017 emit_move_insn (reg
, const0_rtx
);
1019 bytes
-= bitsize
/ BITS_PER_UNIT
;
1020 store_bit_field (reg
, bitsize
, big_endian_correction
, word_mode
,
1021 extract_bit_field (word
, bitsize
, 0, 1, NULL_RTX
,
1022 word_mode
, word_mode
, bitalign
,
1024 bitalign
, BITS_PER_WORD
);
1029 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1032 NUM_ACTUALS is the total number of parameters.
1034 N_NAMED_ARGS is the total number of named arguments.
1036 FNDECL is the tree code for the target of this call (if known)
1038 ARGS_SO_FAR holds state needed by the target to know where to place
1041 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1042 for arguments which are passed in registers.
1044 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
1045 and may be modified by this routine.
1047 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1048 flags which may may be modified by this routine. */
1051 initialize_argument_information (num_actuals
, args
, args_size
, n_named_args
,
1052 actparms
, fndecl
, args_so_far
,
1053 reg_parm_stack_space
, old_stack_level
,
1054 old_pending_adj
, must_preallocate
,
1056 int num_actuals ATTRIBUTE_UNUSED
;
1057 struct arg_data
*args
;
1058 struct args_size
*args_size
;
1059 int n_named_args ATTRIBUTE_UNUSED
;
1062 CUMULATIVE_ARGS
*args_so_far
;
1063 int reg_parm_stack_space
;
1064 rtx
*old_stack_level
;
1065 int *old_pending_adj
;
1066 int *must_preallocate
;
1069 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1072 /* Count arg position in order args appear. */
1075 struct args_size alignment_pad
;
1079 args_size
->constant
= 0;
1082 /* In this loop, we consider args in the order they are written.
1083 We fill up ARGS from the front or from the back if necessary
1084 so that in any case the first arg to be pushed ends up at the front. */
1086 if (PUSH_ARGS_REVERSED
)
1088 i
= num_actuals
- 1, inc
= -1;
1089 /* In this case, must reverse order of args
1090 so that we compute and push the last arg first. */
1097 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1098 for (p
= actparms
, argpos
= 0; p
; p
= TREE_CHAIN (p
), i
+= inc
, argpos
++)
1100 tree type
= TREE_TYPE (TREE_VALUE (p
));
1102 enum machine_mode mode
;
1104 args
[i
].tree_value
= TREE_VALUE (p
);
1106 /* Replace erroneous argument with constant zero. */
1107 if (type
== error_mark_node
|| !COMPLETE_TYPE_P (type
))
1108 args
[i
].tree_value
= integer_zero_node
, type
= integer_type_node
;
1110 /* If TYPE is a transparent union, pass things the way we would
1111 pass the first field of the union. We have already verified that
1112 the modes are the same. */
1113 if (TREE_CODE (type
) == UNION_TYPE
&& TYPE_TRANSPARENT_UNION (type
))
1114 type
= TREE_TYPE (TYPE_FIELDS (type
));
1116 /* Decide where to pass this arg.
1118 args[i].reg is nonzero if all or part is passed in registers.
1120 args[i].partial is nonzero if part but not all is passed in registers,
1121 and the exact value says how many words are passed in registers.
1123 args[i].pass_on_stack is nonzero if the argument must at least be
1124 computed on the stack. It may then be loaded back into registers
1125 if args[i].reg is nonzero.
1127 These decisions are driven by the FUNCTION_... macros and must agree
1128 with those made by function.c. */
1130 /* See if this argument should be passed by invisible reference. */
1131 if ((TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
1132 && contains_placeholder_p (TYPE_SIZE (type
)))
1133 || TREE_ADDRESSABLE (type
)
1134 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1135 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far
, TYPE_MODE (type
),
1136 type
, argpos
< n_named_args
)
1140 /* If we're compiling a thunk, pass through invisible
1141 references instead of making a copy. */
1142 if (current_function_is_thunk
1143 #ifdef FUNCTION_ARG_CALLEE_COPIES
1144 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far
, TYPE_MODE (type
),
1145 type
, argpos
< n_named_args
)
1146 /* If it's in a register, we must make a copy of it too. */
1147 /* ??? Is this a sufficient test? Is there a better one? */
1148 && !(TREE_CODE (args
[i
].tree_value
) == VAR_DECL
1149 && REG_P (DECL_RTL (args
[i
].tree_value
)))
1150 && ! TREE_ADDRESSABLE (type
))
1154 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1155 new object from the argument. If we are passing by
1156 invisible reference, the callee will do that for us, so we
1157 can strip off the TARGET_EXPR. This is not always safe,
1158 but it is safe in the only case where this is a useful
1159 optimization; namely, when the argument is a plain object.
1160 In that case, the frontend is just asking the backend to
1161 make a bitwise copy of the argument. */
1163 if (TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
1164 && (DECL_P (TREE_OPERAND (args
[i
].tree_value
, 1)))
1165 && ! REG_P (DECL_RTL (TREE_OPERAND (args
[i
].tree_value
, 1))))
1166 args
[i
].tree_value
= TREE_OPERAND (args
[i
].tree_value
, 1);
1168 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1169 build_pointer_type (type
),
1170 args
[i
].tree_value
);
1171 type
= build_pointer_type (type
);
1175 /* We make a copy of the object and pass the address to the
1176 function being called. */
1179 if (!COMPLETE_TYPE_P (type
)
1180 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
1181 || (flag_stack_check
&& ! STACK_CHECK_BUILTIN
1182 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type
),
1183 STACK_CHECK_MAX_VAR_SIZE
))))
1185 /* This is a variable-sized object. Make space on the stack
1187 rtx size_rtx
= expr_size (TREE_VALUE (p
));
1189 if (*old_stack_level
== 0)
1191 emit_stack_save (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1192 *old_pending_adj
= pending_stack_adjust
;
1193 pending_stack_adjust
= 0;
1196 copy
= gen_rtx_MEM (BLKmode
,
1197 allocate_dynamic_stack_space
1198 (size_rtx
, NULL_RTX
, TYPE_ALIGN (type
)));
1199 set_mem_attributes (copy
, type
, 1);
1202 copy
= assign_temp (type
, 0, 1, 0);
1204 store_expr (args
[i
].tree_value
, copy
, 0);
1205 *ecf_flags
&= ~(ECF_CONST
| ECF_PURE
);
1207 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1208 build_pointer_type (type
),
1209 make_tree (type
, copy
));
1210 type
= build_pointer_type (type
);
1214 mode
= TYPE_MODE (type
);
1215 unsignedp
= TREE_UNSIGNED (type
);
1217 #ifdef PROMOTE_FUNCTION_ARGS
1218 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
1221 args
[i
].unsignedp
= unsignedp
;
1222 args
[i
].mode
= mode
;
1224 args
[i
].reg
= FUNCTION_ARG (*args_so_far
, mode
, type
,
1225 argpos
< n_named_args
);
1226 #ifdef FUNCTION_INCOMING_ARG
1227 /* If this is a sibling call and the machine has register windows, the
1228 register window has to be unwinded before calling the routine, so
1229 arguments have to go into the incoming registers. */
1230 args
[i
].tail_call_reg
= FUNCTION_INCOMING_ARG (*args_so_far
, mode
, type
,
1231 argpos
< n_named_args
);
1233 args
[i
].tail_call_reg
= args
[i
].reg
;
1236 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1239 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far
, mode
, type
,
1240 argpos
< n_named_args
);
1243 args
[i
].pass_on_stack
= MUST_PASS_IN_STACK (mode
, type
);
1245 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1246 it means that we are to pass this arg in the register(s) designated
1247 by the PARALLEL, but also to pass it in the stack. */
1248 if (args
[i
].reg
&& GET_CODE (args
[i
].reg
) == PARALLEL
1249 && XEXP (XVECEXP (args
[i
].reg
, 0, 0), 0) == 0)
1250 args
[i
].pass_on_stack
= 1;
1252 /* If this is an addressable type, we must preallocate the stack
1253 since we must evaluate the object into its final location.
1255 If this is to be passed in both registers and the stack, it is simpler
1257 if (TREE_ADDRESSABLE (type
)
1258 || (args
[i
].pass_on_stack
&& args
[i
].reg
!= 0))
1259 *must_preallocate
= 1;
1261 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1262 we cannot consider this function call constant. */
1263 if (TREE_ADDRESSABLE (type
))
1264 *ecf_flags
&= ~(ECF_CONST
| ECF_PURE
);
1266 /* Compute the stack-size of this argument. */
1267 if (args
[i
].reg
== 0 || args
[i
].partial
!= 0
1268 || reg_parm_stack_space
> 0
1269 || args
[i
].pass_on_stack
)
1270 locate_and_pad_parm (mode
, type
,
1271 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1276 fndecl
, args_size
, &args
[i
].offset
,
1277 &args
[i
].size
, &alignment_pad
);
1279 #ifndef ARGS_GROW_DOWNWARD
1280 args
[i
].slot_offset
= *args_size
;
1283 args
[i
].alignment_pad
= alignment_pad
;
1285 /* If a part of the arg was put into registers,
1286 don't include that part in the amount pushed. */
1287 if (reg_parm_stack_space
== 0 && ! args
[i
].pass_on_stack
)
1288 args
[i
].size
.constant
-= ((args
[i
].partial
* UNITS_PER_WORD
)
1289 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
1290 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
1292 /* Update ARGS_SIZE, the total stack space for args so far. */
1294 args_size
->constant
+= args
[i
].size
.constant
;
1295 if (args
[i
].size
.var
)
1297 ADD_PARM_SIZE (*args_size
, args
[i
].size
.var
);
1300 /* Since the slot offset points to the bottom of the slot,
1301 we must record it after incrementing if the args grow down. */
1302 #ifdef ARGS_GROW_DOWNWARD
1303 args
[i
].slot_offset
= *args_size
;
1305 args
[i
].slot_offset
.constant
= -args_size
->constant
;
1307 SUB_PARM_SIZE (args
[i
].slot_offset
, args_size
->var
);
1310 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1311 have been used, etc. */
1313 FUNCTION_ARG_ADVANCE (*args_so_far
, TYPE_MODE (type
), type
,
1314 argpos
< n_named_args
);
1318 /* Update ARGS_SIZE to contain the total size for the argument block.
1319 Return the original constant component of the argument block's size.
1321 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1322 for arguments passed in registers. */
1325 compute_argument_block_size (reg_parm_stack_space
, args_size
,
1326 preferred_stack_boundary
)
1327 int reg_parm_stack_space
;
1328 struct args_size
*args_size
;
1329 int preferred_stack_boundary ATTRIBUTE_UNUSED
;
1331 int unadjusted_args_size
= args_size
->constant
;
1333 /* For accumulate outgoing args mode we don't need to align, since the frame
1334 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1335 backends from generating missaligned frame sizes. */
1336 #ifdef STACK_BOUNDARY
1337 if (ACCUMULATE_OUTGOING_ARGS
&& preferred_stack_boundary
> STACK_BOUNDARY
)
1338 preferred_stack_boundary
= STACK_BOUNDARY
;
1341 /* Compute the actual size of the argument block required. The variable
1342 and constant sizes must be combined, the size may have to be rounded,
1343 and there may be a minimum required size. */
1347 args_size
->var
= ARGS_SIZE_TREE (*args_size
);
1348 args_size
->constant
= 0;
1350 #ifdef PREFERRED_STACK_BOUNDARY
1351 preferred_stack_boundary
/= BITS_PER_UNIT
;
1352 if (preferred_stack_boundary
> 1)
1354 /* We don't handle this case yet. To handle it correctly we have
1355 to add the delta, round and substract the delta.
1356 Currently no machine description requires this support. */
1357 if (stack_pointer_delta
& (preferred_stack_boundary
- 1))
1359 args_size
->var
= round_up (args_size
->var
, preferred_stack_boundary
);
1363 if (reg_parm_stack_space
> 0)
1366 = size_binop (MAX_EXPR
, args_size
->var
,
1367 ssize_int (reg_parm_stack_space
));
1369 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1370 /* The area corresponding to register parameters is not to count in
1371 the size of the block we need. So make the adjustment. */
1373 = size_binop (MINUS_EXPR
, args_size
->var
,
1374 ssize_int (reg_parm_stack_space
));
1380 #ifdef PREFERRED_STACK_BOUNDARY
1381 preferred_stack_boundary
/= BITS_PER_UNIT
;
1382 if (preferred_stack_boundary
< 1)
1383 preferred_stack_boundary
= 1;
1384 args_size
->constant
= (((args_size
->constant
1385 + stack_pointer_delta
1386 + preferred_stack_boundary
- 1)
1387 / preferred_stack_boundary
1388 * preferred_stack_boundary
)
1389 - stack_pointer_delta
);
1392 args_size
->constant
= MAX (args_size
->constant
,
1393 reg_parm_stack_space
);
1395 #ifdef MAYBE_REG_PARM_STACK_SPACE
1396 if (reg_parm_stack_space
== 0)
1397 args_size
->constant
= 0;
1400 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1401 args_size
->constant
-= reg_parm_stack_space
;
1404 return unadjusted_args_size
;
1407 /* Precompute parameters as needed for a function call.
1409 FLAGS is mask of ECF_* constants.
1411 NUM_ACTUALS is the number of arguments.
1413 ARGS is an array containing information for each argument; this routine
1414 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1418 precompute_arguments (flags
, num_actuals
, args
)
1421 struct arg_data
*args
;
1425 /* If this function call is cse'able, precompute all the parameters.
1426 Note that if the parameter is constructed into a temporary, this will
1427 cause an additional copy because the parameter will be constructed
1428 into a temporary location and then copied into the outgoing arguments.
1429 If a parameter contains a call to alloca and this function uses the
1430 stack, precompute the parameter. */
1432 /* If we preallocated the stack space, and some arguments must be passed
1433 on the stack, then we must precompute any parameter which contains a
1434 function call which will store arguments on the stack.
1435 Otherwise, evaluating the parameter may clobber previous parameters
1436 which have already been stored into the stack. (we have code to avoid
1437 such case by saving the ougoing stack arguments, but it results in
1440 for (i
= 0; i
< num_actuals
; i
++)
1441 if ((flags
& (ECF_CONST
| ECF_PURE
))
1442 || calls_function (args
[i
].tree_value
, !ACCUMULATE_OUTGOING_ARGS
))
1444 /* If this is an addressable type, we cannot pre-evaluate it. */
1445 if (TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
)))
1451 = expand_expr (args
[i
].tree_value
, NULL_RTX
, VOIDmode
, 0);
1453 preserve_temp_slots (args
[i
].value
);
1456 /* ANSI doesn't require a sequence point here,
1457 but PCC has one, so this will avoid some problems. */
1460 args
[i
].initial_value
= args
[i
].value
1461 = protect_from_queue (args
[i
].value
, 0);
1463 if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) != args
[i
].mode
)
1466 = convert_modes (args
[i
].mode
,
1467 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
1468 args
[i
].value
, args
[i
].unsignedp
);
1469 #ifdef PROMOTE_FOR_CALL_ONLY
1470 /* CSE will replace this only if it contains args[i].value
1471 pseudo, so convert it down to the declared mode using
1473 if (GET_CODE (args
[i
].value
) == REG
1474 && GET_MODE_CLASS (args
[i
].mode
) == MODE_INT
)
1476 args
[i
].initial_value
1477 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
1479 SUBREG_PROMOTED_VAR_P (args
[i
].initial_value
) = 1;
1480 SUBREG_PROMOTED_UNSIGNED_P (args
[i
].initial_value
)
1481 = args
[i
].unsignedp
;
1488 /* Given the current state of MUST_PREALLOCATE and information about
1489 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1490 compute and return the final value for MUST_PREALLOCATE. */
1493 finalize_must_preallocate (must_preallocate
, num_actuals
, args
, args_size
)
1494 int must_preallocate
;
1496 struct arg_data
*args
;
1497 struct args_size
*args_size
;
1499 /* See if we have or want to preallocate stack space.
1501 If we would have to push a partially-in-regs parm
1502 before other stack parms, preallocate stack space instead.
1504 If the size of some parm is not a multiple of the required stack
1505 alignment, we must preallocate.
1507 If the total size of arguments that would otherwise create a copy in
1508 a temporary (such as a CALL) is more than half the total argument list
1509 size, preallocation is faster.
1511 Another reason to preallocate is if we have a machine (like the m88k)
1512 where stack alignment is required to be maintained between every
1513 pair of insns, not just when the call is made. However, we assume here
1514 that such machines either do not have push insns (and hence preallocation
1515 would occur anyway) or the problem is taken care of with
1518 if (! must_preallocate
)
1520 int partial_seen
= 0;
1521 int copy_to_evaluate_size
= 0;
1524 for (i
= 0; i
< num_actuals
&& ! must_preallocate
; i
++)
1526 if (args
[i
].partial
> 0 && ! args
[i
].pass_on_stack
)
1528 else if (partial_seen
&& args
[i
].reg
== 0)
1529 must_preallocate
= 1;
1531 if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
1532 && (TREE_CODE (args
[i
].tree_value
) == CALL_EXPR
1533 || TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
1534 || TREE_CODE (args
[i
].tree_value
) == COND_EXPR
1535 || TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
))))
1536 copy_to_evaluate_size
1537 += int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
1540 if (copy_to_evaluate_size
* 2 >= args_size
->constant
1541 && args_size
->constant
> 0)
1542 must_preallocate
= 1;
1544 return must_preallocate
;
1547 /* If we preallocated stack space, compute the address of each argument
1548 and store it into the ARGS array.
1550 We need not ensure it is a valid memory address here; it will be
1551 validized when it is used.
1553 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1556 compute_argument_addresses (args
, argblock
, num_actuals
)
1557 struct arg_data
*args
;
1563 rtx arg_reg
= argblock
;
1564 int i
, arg_offset
= 0;
1566 if (GET_CODE (argblock
) == PLUS
)
1567 arg_reg
= XEXP (argblock
, 0), arg_offset
= INTVAL (XEXP (argblock
, 1));
1569 for (i
= 0; i
< num_actuals
; i
++)
1571 rtx offset
= ARGS_SIZE_RTX (args
[i
].offset
);
1572 rtx slot_offset
= ARGS_SIZE_RTX (args
[i
].slot_offset
);
1575 /* Skip this parm if it will not be passed on the stack. */
1576 if (! args
[i
].pass_on_stack
&& args
[i
].reg
!= 0)
1579 if (GET_CODE (offset
) == CONST_INT
)
1580 addr
= plus_constant (arg_reg
, INTVAL (offset
));
1582 addr
= gen_rtx_PLUS (Pmode
, arg_reg
, offset
);
1584 addr
= plus_constant (addr
, arg_offset
);
1585 args
[i
].stack
= gen_rtx_MEM (args
[i
].mode
, addr
);
1586 set_mem_attributes (args
[i
].stack
,
1587 TREE_TYPE (args
[i
].tree_value
), 1);
1589 if (GET_CODE (slot_offset
) == CONST_INT
)
1590 addr
= plus_constant (arg_reg
, INTVAL (slot_offset
));
1592 addr
= gen_rtx_PLUS (Pmode
, arg_reg
, slot_offset
);
1594 addr
= plus_constant (addr
, arg_offset
);
1595 args
[i
].stack_slot
= gen_rtx_MEM (args
[i
].mode
, addr
);
1596 set_mem_attributes (args
[i
].stack_slot
,
1597 TREE_TYPE (args
[i
].tree_value
), 1);
1602 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1603 in a call instruction.
1605 FNDECL is the tree node for the target function. For an indirect call
1606 FNDECL will be NULL_TREE.
1608 EXP is the CALL_EXPR for this call. */
1611 rtx_for_function_call (fndecl
, exp
)
1617 /* Get the function to call, in the form of RTL. */
1620 /* If this is the first use of the function, see if we need to
1621 make an external definition for it. */
1622 if (! TREE_USED (fndecl
))
1624 assemble_external (fndecl
);
1625 TREE_USED (fndecl
) = 1;
1628 /* Get a SYMBOL_REF rtx for the function address. */
1629 funexp
= XEXP (DECL_RTL (fndecl
), 0);
1632 /* Generate an rtx (probably a pseudo-register) for the address. */
1637 expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
1638 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1640 /* Check the function is executable. */
1641 if (current_function_check_memory_usage
)
1643 #ifdef POINTERS_EXTEND_UNSIGNED
1644 /* It might be OK to convert funexp in place, but there's
1645 a lot going on between here and when it happens naturally
1646 that this seems safer. */
1647 funaddr
= convert_memory_address (Pmode
, funexp
);
1649 emit_library_call (chkr_check_exec_libfunc
, 1,
1658 /* Do the register loads required for any wholly-register parms or any
1659 parms which are passed both on the stack and in a register. Their
1660 expressions were already evaluated.
1662 Mark all register-parms as living through the call, putting these USE
1663 insns in the CALL_INSN_FUNCTION_USAGE field. */
1666 load_register_parameters (args
, num_actuals
, call_fusage
, flags
)
1667 struct arg_data
*args
;
1674 #ifdef LOAD_ARGS_REVERSED
1675 for (i
= num_actuals
- 1; i
>= 0; i
--)
1677 for (i
= 0; i
< num_actuals
; i
++)
1680 rtx reg
= ((flags
& ECF_SIBCALL
)
1681 ? args
[i
].tail_call_reg
: args
[i
].reg
);
1682 int partial
= args
[i
].partial
;
1687 /* Set to non-negative if must move a word at a time, even if just
1688 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1689 we just use a normal move insn. This value can be zero if the
1690 argument is a zero size structure with no fields. */
1691 nregs
= (partial
? partial
1692 : (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
1693 ? ((int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
))
1694 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
1697 /* Handle calls that pass values in multiple non-contiguous
1698 locations. The Irix 6 ABI has examples of this. */
1700 if (GET_CODE (reg
) == PARALLEL
)
1701 emit_group_load (reg
, args
[i
].value
,
1702 int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
)),
1703 TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
)));
1705 /* If simple case, just do move. If normal partial, store_one_arg
1706 has already loaded the register for us. In all other cases,
1707 load the register(s) from memory. */
1709 else if (nregs
== -1)
1710 emit_move_insn (reg
, args
[i
].value
);
1712 /* If we have pre-computed the values to put in the registers in
1713 the case of non-aligned structures, copy them in now. */
1715 else if (args
[i
].n_aligned_regs
!= 0)
1716 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
1717 emit_move_insn (gen_rtx_REG (word_mode
, REGNO (reg
) + j
),
1718 args
[i
].aligned_regs
[j
]);
1720 else if (partial
== 0 || args
[i
].pass_on_stack
)
1721 move_block_to_reg (REGNO (reg
),
1722 validize_mem (args
[i
].value
), nregs
,
1725 /* Handle calls that pass values in multiple non-contiguous
1726 locations. The Irix 6 ABI has examples of this. */
1727 if (GET_CODE (reg
) == PARALLEL
)
1728 use_group_regs (call_fusage
, reg
);
1729 else if (nregs
== -1)
1730 use_reg (call_fusage
, reg
);
1732 use_regs (call_fusage
, REGNO (reg
), nregs
== 0 ? 1 : nregs
);
1737 /* Try to integrate function. See expand_inline_function for documentation
1738 about the parameters. */
1741 try_to_integrate (fndecl
, actparms
, target
, ignore
, type
, structure_value_addr
)
1747 rtx structure_value_addr
;
1752 rtx old_stack_level
= 0;
1753 int reg_parm_stack_space
= 0;
1755 #ifdef REG_PARM_STACK_SPACE
1756 #ifdef MAYBE_REG_PARM_STACK_SPACE
1757 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
1759 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
1763 before_call
= get_last_insn ();
1765 timevar_push (TV_INTEGRATION
);
1767 temp
= expand_inline_function (fndecl
, actparms
, target
,
1769 structure_value_addr
);
1771 timevar_pop (TV_INTEGRATION
);
1773 /* If inlining succeeded, return. */
1774 if (temp
!= (rtx
) (HOST_WIDE_INT
) - 1)
1776 if (ACCUMULATE_OUTGOING_ARGS
)
1778 /* If the outgoing argument list must be preserved, push
1779 the stack before executing the inlined function if it
1782 for (i
= reg_parm_stack_space
- 1; i
>= 0; i
--)
1783 if (i
< highest_outgoing_arg_in_use
&& stack_usage_map
[i
] != 0)
1786 if (stack_arg_under_construction
|| i
>= 0)
1789 = before_call
? NEXT_INSN (before_call
) : get_insns ();
1790 rtx insn
= NULL_RTX
, seq
;
1792 /* Look for a call in the inline function code.
1793 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1794 nonzero then there is a call and it is not necessary
1795 to scan the insns. */
1797 if (DECL_SAVED_INSNS (fndecl
)->outgoing_args_size
== 0)
1798 for (insn
= first_insn
; insn
; insn
= NEXT_INSN (insn
))
1799 if (GET_CODE (insn
) == CALL_INSN
)
1804 /* Reserve enough stack space so that the largest
1805 argument list of any function call in the inline
1806 function does not overlap the argument list being
1807 evaluated. This is usually an overestimate because
1808 allocate_dynamic_stack_space reserves space for an
1809 outgoing argument list in addition to the requested
1810 space, but there is no way to ask for stack space such
1811 that an argument list of a certain length can be
1814 Add the stack space reserved for register arguments, if
1815 any, in the inline function. What is really needed is the
1816 largest value of reg_parm_stack_space in the inline
1817 function, but that is not available. Using the current
1818 value of reg_parm_stack_space is wrong, but gives
1819 correct results on all supported machines. */
1821 int adjust
= (DECL_SAVED_INSNS (fndecl
)->outgoing_args_size
1822 + reg_parm_stack_space
);
1825 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1826 allocate_dynamic_stack_space (GEN_INT (adjust
),
1827 NULL_RTX
, BITS_PER_UNIT
);
1830 emit_insns_before (seq
, first_insn
);
1831 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1836 /* If the result is equivalent to TARGET, return TARGET to simplify
1837 checks in store_expr. They can be equivalent but not equal in the
1838 case of a function that returns BLKmode. */
1839 if (temp
!= target
&& rtx_equal_p (temp
, target
))
1844 /* If inlining failed, mark FNDECL as needing to be compiled
1845 separately after all. If function was declared inline,
1847 if (DECL_INLINE (fndecl
) && warn_inline
&& !flag_no_inline
1848 && optimize
> 0 && !TREE_ADDRESSABLE (fndecl
))
1850 warning_with_decl (fndecl
, "inlining failed in call to `%s'");
1851 warning ("called from here");
1853 mark_addressable (fndecl
);
1854 return (rtx
) (HOST_WIDE_INT
) - 1;
1857 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1858 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1859 bytes, then we would need to push some additional bytes to pad the
1860 arguments. So, we compute an adjust to the stack pointer for an
1861 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1862 bytes. Then, when the arguments are pushed the stack will be perfectly
1863 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1864 be popped after the call. Returns the adjustment. */
1867 combine_pending_stack_adjustment_and_call (unadjusted_args_size
,
1869 preferred_unit_stack_boundary
)
1870 int unadjusted_args_size
;
1871 struct args_size
*args_size
;
1872 int preferred_unit_stack_boundary
;
1874 /* The number of bytes to pop so that the stack will be
1875 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1876 HOST_WIDE_INT adjustment
;
1877 /* The alignment of the stack after the arguments are pushed, if we
1878 just pushed the arguments without adjust the stack here. */
1879 HOST_WIDE_INT unadjusted_alignment
;
1881 unadjusted_alignment
1882 = ((stack_pointer_delta
+ unadjusted_args_size
)
1883 % preferred_unit_stack_boundary
);
1885 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1886 as possible -- leaving just enough left to cancel out the
1887 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1888 PENDING_STACK_ADJUST is non-negative, and congruent to
1889 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1891 /* Begin by trying to pop all the bytes. */
1892 unadjusted_alignment
1893 = (unadjusted_alignment
1894 - (pending_stack_adjust
% preferred_unit_stack_boundary
));
1895 adjustment
= pending_stack_adjust
;
1896 /* Push enough additional bytes that the stack will be aligned
1897 after the arguments are pushed. */
1898 if (unadjusted_alignment
>= 0)
1899 adjustment
-= preferred_unit_stack_boundary
- unadjusted_alignment
;
1901 adjustment
+= unadjusted_alignment
;
1903 /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
1904 bytes after the call. The right number is the entire
1905 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1906 by the arguments in the first place. */
1908 = pending_stack_adjust
- adjustment
+ unadjusted_args_size
;
1913 /* Generate all the code for a function call
1914 and return an rtx for its value.
1915 Store the value in TARGET (specified as an rtx) if convenient.
1916 If the value is stored in TARGET then TARGET is returned.
1917 If IGNORE is nonzero, then we ignore the value of the function call. */
1920 expand_call (exp
, target
, ignore
)
1925 /* Nonzero if we are currently expanding a call. */
1926 static int currently_expanding_call
= 0;
1928 /* List of actual parameters. */
1929 tree actparms
= TREE_OPERAND (exp
, 1);
1930 /* RTX for the function to be called. */
1932 /* Sequence of insns to perform a tail recursive "call". */
1933 rtx tail_recursion_insns
= NULL_RTX
;
1934 /* Sequence of insns to perform a normal "call". */
1935 rtx normal_call_insns
= NULL_RTX
;
1936 /* Sequence of insns to perform a tail recursive "call". */
1937 rtx tail_call_insns
= NULL_RTX
;
1938 /* Data type of the function. */
1940 /* Declaration of the function being called,
1941 or 0 if the function is computed (not known by name). */
1945 int try_tail_call
= 1;
1946 int try_tail_recursion
= 1;
1949 /* Register in which non-BLKmode value will be returned,
1950 or 0 if no value or if value is BLKmode. */
1952 /* Address where we should return a BLKmode value;
1953 0 if value not BLKmode. */
1954 rtx structure_value_addr
= 0;
1955 /* Nonzero if that address is being passed by treating it as
1956 an extra, implicit first parameter. Otherwise,
1957 it is passed by being copied directly into struct_value_rtx. */
1958 int structure_value_addr_parm
= 0;
1959 /* Size of aggregate value wanted, or zero if none wanted
1960 or if we are using the non-reentrant PCC calling convention
1961 or expecting the value in registers. */
1962 HOST_WIDE_INT struct_value_size
= 0;
1963 /* Nonzero if called function returns an aggregate in memory PCC style,
1964 by returning the address of where to find it. */
1965 int pcc_struct_value
= 0;
1967 /* Number of actual parameters in this call, including struct value addr. */
1969 /* Number of named args. Args after this are anonymous ones
1970 and they must all go on the stack. */
1973 /* Vector of information about each argument.
1974 Arguments are numbered in the order they will be pushed,
1975 not the order they are written. */
1976 struct arg_data
*args
;
1978 /* Total size in bytes of all the stack-parms scanned so far. */
1979 struct args_size args_size
;
1980 struct args_size adjusted_args_size
;
1981 /* Size of arguments before any adjustments (such as rounding). */
1982 int unadjusted_args_size
;
1983 /* Data on reg parms scanned so far. */
1984 CUMULATIVE_ARGS args_so_far
;
1985 /* Nonzero if a reg parm has been scanned. */
1987 /* Nonzero if this is an indirect function call. */
1989 /* Nonzero if we must avoid push-insns in the args for this call.
1990 If stack space is allocated for register parameters, but not by the
1991 caller, then it is preallocated in the fixed part of the stack frame.
1992 So the entire argument block must then be preallocated (i.e., we
1993 ignore PUSH_ROUNDING in that case). */
1995 int must_preallocate
= !PUSH_ARGS
;
1997 /* Size of the stack reserved for parameter registers. */
1998 int reg_parm_stack_space
= 0;
2000 /* Address of space preallocated for stack parms
2001 (on machines that lack push insns), or 0 if space not preallocated. */
2004 /* Mask of ECF_ flags. */
2006 /* Nonzero if this is a call to an inline function. */
2007 int is_integrable
= 0;
2008 #ifdef REG_PARM_STACK_SPACE
2009 /* Define the boundary of the register parm stack space that needs to be
2011 int low_to_save
= -1, high_to_save
;
2012 rtx save_area
= 0; /* Place that it is saved */
2015 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
2016 char *initial_stack_usage_map
= stack_usage_map
;
2017 int old_stack_arg_under_construction
= 0;
2019 rtx old_stack_level
= 0;
2020 int old_pending_adj
= 0;
2021 int old_inhibit_defer_pop
= inhibit_defer_pop
;
2022 int old_stack_allocated
;
2026 /* The alignment of the stack, in bits. */
2027 HOST_WIDE_INT preferred_stack_boundary
;
2028 /* The alignment of the stack, in bytes. */
2029 HOST_WIDE_INT preferred_unit_stack_boundary
;
2031 /* The value of the function call can be put in a hard register. But
2032 if -fcheck-memory-usage, code which invokes functions (and thus
2033 damages some hard registers) can be inserted before using the value.
2034 So, target is always a pseudo-register in that case. */
2035 if (current_function_check_memory_usage
)
2038 /* See if this is "nothrow" function call. */
2039 if (TREE_NOTHROW (exp
))
2040 flags
|= ECF_NOTHROW
;
2042 /* See if we can find a DECL-node for the actual function.
2043 As a result, decide whether this is a call to an integrable function. */
2045 fndecl
= get_callee_fndecl (exp
);
2049 && fndecl
!= current_function_decl
2050 && DECL_INLINE (fndecl
)
2051 && DECL_SAVED_INSNS (fndecl
)
2052 && DECL_SAVED_INSNS (fndecl
)->inlinable
)
2054 else if (! TREE_ADDRESSABLE (fndecl
))
2056 /* In case this function later becomes inlinable,
2057 record that there was already a non-inline call to it.
2059 Use abstraction instead of setting TREE_ADDRESSABLE
2061 if (DECL_INLINE (fndecl
) && warn_inline
&& !flag_no_inline
2064 warning_with_decl (fndecl
, "can't inline call to `%s'");
2065 warning ("called from here");
2067 mark_addressable (fndecl
);
2070 flags
|= flags_from_decl_or_type (fndecl
);
2073 /* If we don't have specific function to call, see if we have a
2074 attributes set in the type. */
2077 p
= TREE_OPERAND (exp
, 0);
2078 flags
|= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p
)));
2081 #ifdef REG_PARM_STACK_SPACE
2082 #ifdef MAYBE_REG_PARM_STACK_SPACE
2083 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
2085 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
2089 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2090 if (reg_parm_stack_space
> 0 && PUSH_ARGS
)
2091 must_preallocate
= 1;
2094 /* Warn if this value is an aggregate type,
2095 regardless of which calling convention we are using for it. */
2096 if (warn_aggregate_return
&& AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
2097 warning ("function call has aggregate value");
2099 /* Set up a place to return a structure. */
2101 /* Cater to broken compilers. */
2102 if (aggregate_value_p (exp
))
2104 /* This call returns a big structure. */
2105 flags
&= ~(ECF_CONST
| ECF_PURE
);
2107 #ifdef PCC_STATIC_STRUCT_RETURN
2109 pcc_struct_value
= 1;
2110 /* Easier than making that case work right. */
2113 /* In case this is a static function, note that it has been
2115 if (! TREE_ADDRESSABLE (fndecl
))
2116 mark_addressable (fndecl
);
2120 #else /* not PCC_STATIC_STRUCT_RETURN */
2122 struct_value_size
= int_size_in_bytes (TREE_TYPE (exp
));
2124 if (target
&& GET_CODE (target
) == MEM
)
2125 structure_value_addr
= XEXP (target
, 0);
2128 /* Assign a temporary to hold the value. */
2131 /* For variable-sized objects, we must be called with a target
2132 specified. If we were to allocate space on the stack here,
2133 we would have no way of knowing when to free it. */
2135 if (struct_value_size
< 0)
2138 /* This DECL is just something to feed to mark_addressable;
2139 it doesn't get pushed. */
2140 d
= build_decl (VAR_DECL
, NULL_TREE
, TREE_TYPE (exp
));
2141 DECL_RTL (d
) = assign_temp (TREE_TYPE (exp
), 1, 0, 1);
2142 mark_addressable (d
);
2143 mark_temp_addr_taken (DECL_RTL (d
));
2144 structure_value_addr
= XEXP (DECL_RTL (d
), 0);
2149 #endif /* not PCC_STATIC_STRUCT_RETURN */
2152 /* If called function is inline, try to integrate it. */
2156 rtx temp
= try_to_integrate (fndecl
, actparms
, target
,
2157 ignore
, TREE_TYPE (exp
),
2158 structure_value_addr
);
2159 if (temp
!= (rtx
) (HOST_WIDE_INT
) - 1)
2163 if (fndecl
&& DECL_NAME (fndecl
))
2164 name
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
2166 /* Figure out the amount to which the stack should be aligned. */
2167 #ifdef PREFERRED_STACK_BOUNDARY
2168 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
2170 preferred_stack_boundary
= STACK_BOUNDARY
;
2173 /* Operand 0 is a pointer-to-function; get the type of the function. */
2174 funtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
2175 if (! POINTER_TYPE_P (funtype
))
2177 funtype
= TREE_TYPE (funtype
);
2179 /* See if this is a call to a function that can return more than once
2180 or a call to longjmp or malloc. */
2181 flags
|= special_function_p (fndecl
, flags
);
2183 if (flags
& ECF_MAY_BE_ALLOCA
)
2184 current_function_calls_alloca
= 1;
2186 /* If struct_value_rtx is 0, it means pass the address
2187 as if it were an extra parameter. */
2188 if (structure_value_addr
&& struct_value_rtx
== 0)
2190 /* If structure_value_addr is a REG other than
2191 virtual_outgoing_args_rtx, we can use always use it. If it
2192 is not a REG, we must always copy it into a register.
2193 If it is virtual_outgoing_args_rtx, we must copy it to another
2194 register in some cases. */
2195 rtx temp
= (GET_CODE (structure_value_addr
) != REG
2196 || (ACCUMULATE_OUTGOING_ARGS
2197 && stack_arg_under_construction
2198 && structure_value_addr
== virtual_outgoing_args_rtx
)
2199 ? copy_addr_to_reg (structure_value_addr
)
2200 : structure_value_addr
);
2203 = tree_cons (error_mark_node
,
2204 make_tree (build_pointer_type (TREE_TYPE (funtype
)),
2207 structure_value_addr_parm
= 1;
2210 /* Count the arguments and set NUM_ACTUALS. */
2211 for (p
= actparms
, num_actuals
= 0; p
; p
= TREE_CHAIN (p
))
2214 /* Compute number of named args.
2215 Normally, don't include the last named arg if anonymous args follow.
2216 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2217 (If no anonymous args follow, the result of list_length is actually
2218 one too large. This is harmless.)
2220 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2221 zero, this machine will be able to place unnamed args that were
2222 passed in registers into the stack. So treat all args as named.
2223 This allows the insns emitting for a specific argument list to be
2224 independent of the function declaration.
2226 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2227 reliable way to pass unnamed args in registers, so we must force
2228 them into memory. */
2230 if ((STRICT_ARGUMENT_NAMING
2231 || ! PRETEND_OUTGOING_VARARGS_NAMED
)
2232 && TYPE_ARG_TYPES (funtype
) != 0)
2234 = (list_length (TYPE_ARG_TYPES (funtype
))
2235 /* Don't include the last named arg. */
2236 - (STRICT_ARGUMENT_NAMING
? 0 : 1)
2237 /* Count the struct value address, if it is passed as a parm. */
2238 + structure_value_addr_parm
);
2240 /* If we know nothing, treat all args as named. */
2241 n_named_args
= num_actuals
;
2243 /* Start updating where the next arg would go.
2245 On some machines (such as the PA) indirect calls have a different
2246 calling convention than normal calls. The last argument in
2247 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2249 INIT_CUMULATIVE_ARGS (args_so_far
, funtype
, NULL_RTX
, (fndecl
== 0));
2252 /* Make a vector to hold all the information about each arg. */
2253 args
= (struct arg_data
*) alloca (num_actuals
2254 * sizeof (struct arg_data
));
2255 bzero ((char *) args
, num_actuals
* sizeof (struct arg_data
));
2257 /* Build up entries inthe ARGS array, compute the size of the arguments
2258 into ARGS_SIZE, etc. */
2259 initialize_argument_information (num_actuals
, args
, &args_size
,
2260 n_named_args
, actparms
, fndecl
,
2261 &args_so_far
, reg_parm_stack_space
,
2262 &old_stack_level
, &old_pending_adj
,
2263 &must_preallocate
, &flags
);
2267 /* If this function requires a variable-sized argument list, don't
2268 try to make a cse'able block for this call. We may be able to
2269 do this eventually, but it is too complicated to keep track of
2270 what insns go in the cse'able block and which don't. */
2272 flags
&= ~(ECF_CONST
| ECF_PURE
);
2273 must_preallocate
= 1;
2276 /* Now make final decision about preallocating stack space. */
2277 must_preallocate
= finalize_must_preallocate (must_preallocate
,
2281 /* If the structure value address will reference the stack pointer, we
2282 must stabilize it. We don't need to do this if we know that we are
2283 not going to adjust the stack pointer in processing this call. */
2285 if (structure_value_addr
2286 && (reg_mentioned_p (virtual_stack_dynamic_rtx
, structure_value_addr
)
2287 || reg_mentioned_p (virtual_outgoing_args_rtx
,
2288 structure_value_addr
))
2290 || (!ACCUMULATE_OUTGOING_ARGS
&& args_size
.constant
)))
2291 structure_value_addr
= copy_to_reg (structure_value_addr
);
2293 /* Tail calls can make things harder to debug, and we're traditionally
2294 pushed these optimizations into -O2. Don't try if we're already
2295 expanding a call, as that means we're an argument. Similarly, if
2296 there's pending loops or cleanups we know there's code to follow
2299 If rtx_equal_function_value_matters is false, that means we've
2300 finished with regular parsing. Which means that some of the
2301 machinery we use to generate tail-calls is no longer in place.
2302 This is most often true of sjlj-exceptions, which we couldn't
2303 tail-call to anyway. */
2305 if (currently_expanding_call
++ != 0
2306 || !flag_optimize_sibling_calls
2307 || !rtx_equal_function_value_matters
2308 || !stmt_loop_nest_empty ()
2309 || any_pending_cleanups (1)
2311 try_tail_call
= try_tail_recursion
= 0;
2313 /* Tail recursion fails, when we are not dealing with recursive calls. */
2314 if (!try_tail_recursion
2315 || TREE_CODE (TREE_OPERAND (exp
, 0)) != ADDR_EXPR
2316 || TREE_OPERAND (TREE_OPERAND (exp
, 0), 0) != current_function_decl
)
2317 try_tail_recursion
= 0;
2319 /* Rest of purposes for tail call optimizations to fail. */
2321 #ifdef HAVE_sibcall_epilogue
2322 !HAVE_sibcall_epilogue
2327 /* Doing sibling call optimization needs some work, since
2328 structure_value_addr can be allocated on the stack.
2329 It does not seem worth the effort since few optimizable
2330 sibling calls will return a structure. */
2331 || structure_value_addr
!= NULL_RTX
2332 /* If the register holding the address is a callee saved
2333 register, then we lose. We have no way to prevent that,
2334 so we only allow calls to named functions. */
2335 /* ??? This could be done by having the insn constraints
2336 use a register class that is all call-clobbered. Any
2337 reload insns generated to fix things up would appear
2338 before the sibcall_epilogue. */
2339 || fndecl
== NULL_TREE
2340 || (flags
& (ECF_RETURNS_TWICE
| ECF_LONGJMP
))
2341 || !FUNCTION_OK_FOR_SIBCALL (fndecl
)
2342 /* If this function requires more stack slots than the current
2343 function, we cannot change it into a sibling call. */
2344 || args_size
.constant
> current_function_args_size
2345 /* If the callee pops its own arguments, then it must pop exactly
2346 the same number of arguments as the current function. */
2347 || RETURN_POPS_ARGS (fndecl
, funtype
, args_size
.constant
)
2348 != RETURN_POPS_ARGS (current_function_decl
,
2349 TREE_TYPE (current_function_decl
),
2350 current_function_args_size
))
2353 if (try_tail_call
|| try_tail_recursion
)
2356 actparms
= NULL_TREE
;
2357 /* Ok, we're going to give the tail call the old college try.
2358 This means we're going to evaluate the function arguments
2359 up to three times. There are two degrees of badness we can
2360 encounter, those that can be unsaved and those that can't.
2361 (See unsafe_for_reeval commentary for details.)
2363 Generate a new argument list. Pass safe arguments through
2364 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2365 For hard badness, evaluate them now and put their resulting
2366 rtx in a temporary VAR_DECL.
2368 initialize_argument_information has ordered the array for the
2369 order to be pushed, and we must remember this when reconstructing
2370 the original argument orde. */
2372 if (PUSH_ARGS_REVERSED
)
2381 i
= num_actuals
- 1;
2385 for (; i
!= end
; i
+= inc
)
2387 switch (unsafe_for_reeval (args
[i
].tree_value
))
2392 case 1: /* Mildly unsafe. */
2393 args
[i
].tree_value
= unsave_expr (args
[i
].tree_value
);
2396 case 2: /* Wildly unsafe. */
2398 tree var
= build_decl (VAR_DECL
, NULL_TREE
,
2399 TREE_TYPE (args
[i
].tree_value
));
2400 DECL_RTL (var
) = expand_expr (args
[i
].tree_value
, NULL_RTX
,
2401 VOIDmode
, EXPAND_NORMAL
);
2402 args
[i
].tree_value
= var
;
2409 /* We need to build actparms for optimize_tail_recursion. We can
2410 safely trash away TREE_PURPOSE, since it is unused by this
2412 if (try_tail_recursion
)
2413 actparms
= tree_cons (NULL_TREE
, args
[i
].tree_value
, actparms
);
2415 /* Expanding one of those dangerous arguments could have added
2416 cleanups, but otherwise give it a whirl. */
2417 if (any_pending_cleanups (1))
2418 try_tail_call
= try_tail_recursion
= 0;
2421 /* Generate a tail recursion sequence when calling ourselves. */
2423 if (try_tail_recursion
)
2425 /* We want to emit any pending stack adjustments before the tail
2426 recursion "call". That way we know any adjustment after the tail
2427 recursion call can be ignored if we indeed use the tail recursion
2429 int save_pending_stack_adjust
= pending_stack_adjust
;
2430 int save_stack_pointer_delta
= stack_pointer_delta
;
2432 /* Use a new sequence to hold any RTL we generate. We do not even
2433 know if we will use this RTL yet. The final decision can not be
2434 made until after RTL generation for the entire function is
2437 /* If expanding any of the arguments creates cleanups, we can't
2438 do a tailcall. So, we'll need to pop the pending cleanups
2439 list. If, however, all goes well, and there are no cleanups
2440 then the call to expand_start_target_temps will have no
2442 expand_start_target_temps ();
2443 if (optimize_tail_recursion (actparms
, get_last_insn ()))
2445 if (any_pending_cleanups (1))
2446 try_tail_call
= try_tail_recursion
= 0;
2448 tail_recursion_insns
= get_insns ();
2450 expand_end_target_temps ();
2453 /* Restore the original pending stack adjustment for the sibling and
2454 normal call cases below. */
2455 pending_stack_adjust
= save_pending_stack_adjust
;
2456 stack_pointer_delta
= save_stack_pointer_delta
;
2459 if (profile_arc_flag
&& (flags
& ECF_FORK_OR_EXEC
))
2461 /* A fork duplicates the profile information, and an exec discards
2462 it. We can't rely on fork/exec to be paired. So write out the
2463 profile information we have gathered so far, and clear it. */
2464 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2465 is subject to race conditions, just as with multithreaded
2468 emit_library_call (gen_rtx_SYMBOL_REF (Pmode
, "__bb_fork_func"), 0,
2472 /* Ensure current function's preferred stack boundary is at least
2473 what we need. We don't have to increase alignment for recursive
2475 if (cfun
->preferred_stack_boundary
< preferred_stack_boundary
2476 && fndecl
!= current_function_decl
)
2477 cfun
->preferred_stack_boundary
= preferred_stack_boundary
;
2479 preferred_unit_stack_boundary
= preferred_stack_boundary
/ BITS_PER_UNIT
;
2481 function_call_count
++;
2483 /* We want to make two insn chains; one for a sibling call, the other
2484 for a normal call. We will select one of the two chains after
2485 initial RTL generation is complete. */
2486 for (pass
= 0; pass
< 2; pass
++)
2488 int sibcall_failure
= 0;
2489 /* We want to emit ay pending stack adjustments before the tail
2490 recursion "call". That way we know any adjustment after the tail
2491 recursion call can be ignored if we indeed use the tail recursion
2493 int save_pending_stack_adjust
= 0;
2494 int save_stack_pointer_delta
= 0;
2496 rtx before_call
, next_arg_reg
;
2500 if (! try_tail_call
)
2503 /* Emit any queued insns now; otherwise they would end up in
2504 only one of the alternates. */
2507 /* State variables we need to save and restore between
2509 save_pending_stack_adjust
= pending_stack_adjust
;
2510 save_stack_pointer_delta
= stack_pointer_delta
;
2513 flags
&= ~ECF_SIBCALL
;
2515 flags
|= ECF_SIBCALL
;
2517 /* Other state variables that we must reinitialize each time
2518 through the loop (that are not initialized by the loop itself). */
2522 /* Start a new sequence for the normal call case.
2524 From this point on, if the sibling call fails, we want to set
2525 sibcall_failure instead of continuing the loop. */
2530 /* We know at this point that there are not currently any
2531 pending cleanups. If, however, in the process of evaluating
2532 the arguments we were to create some, we'll need to be
2533 able to get rid of them. */
2534 expand_start_target_temps ();
2537 /* When calling a const function, we must pop the stack args right away,
2538 so that the pop is deleted or moved with the call. */
2539 if (flags
& (ECF_CONST
| ECF_PURE
))
2542 /* Don't let pending stack adjusts add up to too much.
2543 Also, do all pending adjustments now if there is any chance
2544 this might be a call to alloca or if we are expanding a sibling
2546 if (pending_stack_adjust
>= 32
2547 || (pending_stack_adjust
> 0 && (flags
& ECF_MAY_BE_ALLOCA
))
2549 do_pending_stack_adjust ();
2551 /* Push the temporary stack slot level so that we can free any
2552 temporaries we make. */
2556 #ifdef FINAL_REG_PARM_STACK_SPACE
2557 reg_parm_stack_space
= FINAL_REG_PARM_STACK_SPACE (args_size
.constant
,
2560 /* Precompute any arguments as needed. */
2562 precompute_arguments (flags
, num_actuals
, args
);
2564 /* Now we are about to start emitting insns that can be deleted
2565 if a libcall is deleted. */
2566 if (flags
& (ECF_CONST
| ECF_PURE
| ECF_MALLOC
))
2569 adjusted_args_size
= args_size
;
2570 /* Compute the actual size of the argument block required. The variable
2571 and constant sizes must be combined, the size may have to be rounded,
2572 and there may be a minimum required size. When generating a sibcall
2573 pattern, do not round up, since we'll be re-using whatever space our
2575 unadjusted_args_size
2576 = compute_argument_block_size (reg_parm_stack_space
, &adjusted_args_size
,
2578 : preferred_stack_boundary
));
2580 old_stack_allocated
= stack_pointer_delta
- pending_stack_adjust
;
2582 /* The argument block when performing a sibling call is the
2583 incoming argument block. */
2585 argblock
= virtual_incoming_args_rtx
;
2587 /* If we have no actual push instructions, or shouldn't use them,
2588 make space for all args right now. */
2589 else if (adjusted_args_size
.var
!= 0)
2591 if (old_stack_level
== 0)
2593 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
2594 old_pending_adj
= pending_stack_adjust
;
2595 pending_stack_adjust
= 0;
2596 /* stack_arg_under_construction says whether a stack arg is
2597 being constructed at the old stack level. Pushing the stack
2598 gets a clean outgoing argument block. */
2599 old_stack_arg_under_construction
= stack_arg_under_construction
;
2600 stack_arg_under_construction
= 0;
2602 argblock
= push_block (ARGS_SIZE_RTX (adjusted_args_size
), 0, 0);
2606 /* Note that we must go through the motions of allocating an argument
2607 block even if the size is zero because we may be storing args
2608 in the area reserved for register arguments, which may be part of
2611 int needed
= adjusted_args_size
.constant
;
2613 /* Store the maximum argument space used. It will be pushed by
2614 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2617 if (needed
> current_function_outgoing_args_size
)
2618 current_function_outgoing_args_size
= needed
;
2620 if (must_preallocate
)
2622 if (ACCUMULATE_OUTGOING_ARGS
)
2624 /* Since the stack pointer will never be pushed, it is
2625 possible for the evaluation of a parm to clobber
2626 something we have already written to the stack.
2627 Since most function calls on RISC machines do not use
2628 the stack, this is uncommon, but must work correctly.
2630 Therefore, we save any area of the stack that was already
2631 written and that we are using. Here we set up to do this
2632 by making a new stack usage map from the old one. The
2633 actual save will be done by store_one_arg.
2635 Another approach might be to try to reorder the argument
2636 evaluations to avoid this conflicting stack usage. */
2638 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2639 /* Since we will be writing into the entire argument area,
2640 the map must be allocated for its entire size, not just
2641 the part that is the responsibility of the caller. */
2642 needed
+= reg_parm_stack_space
;
2645 #ifdef ARGS_GROW_DOWNWARD
2646 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2649 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2653 = (char *) alloca (highest_outgoing_arg_in_use
);
2655 if (initial_highest_arg_in_use
)
2656 bcopy (initial_stack_usage_map
, stack_usage_map
,
2657 initial_highest_arg_in_use
);
2659 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
2660 bzero (&stack_usage_map
[initial_highest_arg_in_use
],
2661 (highest_outgoing_arg_in_use
2662 - initial_highest_arg_in_use
));
2665 /* The address of the outgoing argument list must not be
2666 copied to a register here, because argblock would be left
2667 pointing to the wrong place after the call to
2668 allocate_dynamic_stack_space below. */
2670 argblock
= virtual_outgoing_args_rtx
;
2674 if (inhibit_defer_pop
== 0)
2676 /* Try to reuse some or all of the pending_stack_adjust
2677 to get this space. */
2679 = (combine_pending_stack_adjustment_and_call
2680 (unadjusted_args_size
,
2681 &adjusted_args_size
,
2682 preferred_unit_stack_boundary
));
2684 /* combine_pending_stack_adjustment_and_call computes
2685 an adjustment before the arguments are allocated.
2686 Account for them and see whether or not the stack
2687 needs to go up or down. */
2688 needed
= unadjusted_args_size
- needed
;
2692 /* We're releasing stack space. */
2693 /* ??? We can avoid any adjustment at all if we're
2694 already aligned. FIXME. */
2695 pending_stack_adjust
= -needed
;
2696 do_pending_stack_adjust ();
2700 /* We need to allocate space. We'll do that in
2701 push_block below. */
2702 pending_stack_adjust
= 0;
2705 /* Special case this because overhead of `push_block' in
2706 this case is non-trivial. */
2708 argblock
= virtual_outgoing_args_rtx
;
2710 argblock
= push_block (GEN_INT (needed
), 0, 0);
2712 /* We only really need to call `copy_to_reg' in the case
2713 where push insns are going to be used to pass ARGBLOCK
2714 to a function call in ARGS. In that case, the stack
2715 pointer changes value from the allocation point to the
2716 call point, and hence the value of
2717 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2718 as well always do it. */
2719 argblock
= copy_to_reg (argblock
);
2721 /* The save/restore code in store_one_arg handles all
2722 cases except one: a constructor call (including a C
2723 function returning a BLKmode struct) to initialize
2725 if (stack_arg_under_construction
)
2727 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2728 rtx push_size
= GEN_INT (reg_parm_stack_space
2729 + adjusted_args_size
.constant
);
2731 rtx push_size
= GEN_INT (adjusted_args_size
.constant
);
2733 if (old_stack_level
== 0)
2735 emit_stack_save (SAVE_BLOCK
, &old_stack_level
,
2737 old_pending_adj
= pending_stack_adjust
;
2738 pending_stack_adjust
= 0;
2739 /* stack_arg_under_construction says whether a stack
2740 arg is being constructed at the old stack level.
2741 Pushing the stack gets a clean outgoing argument
2743 old_stack_arg_under_construction
2744 = stack_arg_under_construction
;
2745 stack_arg_under_construction
= 0;
2746 /* Make a new map for the new argument list. */
2747 stack_usage_map
= (char *)
2748 alloca (highest_outgoing_arg_in_use
);
2749 bzero (stack_usage_map
, highest_outgoing_arg_in_use
);
2750 highest_outgoing_arg_in_use
= 0;
2752 allocate_dynamic_stack_space (push_size
, NULL_RTX
,
2755 /* If argument evaluation might modify the stack pointer,
2756 copy the address of the argument list to a register. */
2757 for (i
= 0; i
< num_actuals
; i
++)
2758 if (args
[i
].pass_on_stack
)
2760 argblock
= copy_addr_to_reg (argblock
);
2767 compute_argument_addresses (args
, argblock
, num_actuals
);
2769 #ifdef PREFERRED_STACK_BOUNDARY
2770 /* If we push args individually in reverse order, perform stack alignment
2771 before the first push (the last arg). */
2772 if (PUSH_ARGS_REVERSED
&& argblock
== 0
2773 && adjusted_args_size
.constant
!= unadjusted_args_size
)
2775 /* When the stack adjustment is pending, we get better code
2776 by combining the adjustments. */
2777 if (pending_stack_adjust
2778 && ! (flags
& (ECF_CONST
| ECF_PURE
))
2779 && ! inhibit_defer_pop
)
2781 pending_stack_adjust
2782 = (combine_pending_stack_adjustment_and_call
2783 (unadjusted_args_size
,
2784 &adjusted_args_size
,
2785 preferred_unit_stack_boundary
));
2786 do_pending_stack_adjust ();
2788 else if (argblock
== 0)
2789 anti_adjust_stack (GEN_INT (adjusted_args_size
.constant
2790 - unadjusted_args_size
));
2792 /* Now that the stack is properly aligned, pops can't safely
2793 be deferred during the evaluation of the arguments. */
2797 /* Don't try to defer pops if preallocating, not even from the first arg,
2798 since ARGBLOCK probably refers to the SP. */
2802 funexp
= rtx_for_function_call (fndecl
, exp
);
2804 /* Figure out the register where the value, if any, will come back. */
2806 if (TYPE_MODE (TREE_TYPE (exp
)) != VOIDmode
2807 && ! structure_value_addr
)
2809 if (pcc_struct_value
)
2810 valreg
= hard_function_value (build_pointer_type (TREE_TYPE (exp
)),
2811 fndecl
, (pass
== 0));
2813 valreg
= hard_function_value (TREE_TYPE (exp
), fndecl
, (pass
== 0));
2816 /* Precompute all register parameters. It isn't safe to compute anything
2817 once we have started filling any specific hard regs. */
2818 precompute_register_parameters (num_actuals
, args
, ®_parm_seen
);
2820 #ifdef REG_PARM_STACK_SPACE
2821 /* Save the fixed argument area if it's part of the caller's frame and
2822 is clobbered by argument setup for this call. */
2823 if (ACCUMULATE_OUTGOING_ARGS
&& pass
)
2824 save_area
= save_fixed_argument_area (reg_parm_stack_space
, argblock
,
2825 &low_to_save
, &high_to_save
);
2828 /* Now store (and compute if necessary) all non-register parms.
2829 These come before register parms, since they can require block-moves,
2830 which could clobber the registers used for register parms.
2831 Parms which have partial registers are not stored here,
2832 but we do preallocate space here if they want that. */
2834 for (i
= 0; i
< num_actuals
; i
++)
2835 if (args
[i
].reg
== 0 || args
[i
].pass_on_stack
)
2836 store_one_arg (&args
[i
], argblock
, flags
,
2837 adjusted_args_size
.var
!= 0, reg_parm_stack_space
);
2839 /* If we have a parm that is passed in registers but not in memory
2840 and whose alignment does not permit a direct copy into registers,
2841 make a group of pseudos that correspond to each register that we
2843 if (STRICT_ALIGNMENT
)
2844 store_unaligned_arguments_into_pseudos (args
, num_actuals
);
2846 /* Now store any partially-in-registers parm.
2847 This is the last place a block-move can happen. */
2849 for (i
= 0; i
< num_actuals
; i
++)
2850 if (args
[i
].partial
!= 0 && ! args
[i
].pass_on_stack
)
2851 store_one_arg (&args
[i
], argblock
, flags
,
2852 adjusted_args_size
.var
!= 0, reg_parm_stack_space
);
2854 #ifdef PREFERRED_STACK_BOUNDARY
2855 /* If we pushed args in forward order, perform stack alignment
2856 after pushing the last arg. */
2857 if (!PUSH_ARGS_REVERSED
&& argblock
== 0)
2858 anti_adjust_stack (GEN_INT (adjusted_args_size
.constant
2859 - unadjusted_args_size
));
2862 /* If register arguments require space on the stack and stack space
2863 was not preallocated, allocate stack space here for arguments
2864 passed in registers. */
2865 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2866 if (!ACCUMULATE_OUTGOING_ARGS
2867 && must_preallocate
== 0 && reg_parm_stack_space
> 0)
2868 anti_adjust_stack (GEN_INT (reg_parm_stack_space
));
2871 /* Pass the function the address in which to return a
2873 if (pass
!= 0 && structure_value_addr
&& ! structure_value_addr_parm
)
2875 emit_move_insn (struct_value_rtx
,
2877 force_operand (structure_value_addr
,
2880 /* Mark the memory for the aggregate as write-only. */
2881 if (current_function_check_memory_usage
)
2882 emit_library_call (chkr_set_right_libfunc
, 1,
2884 structure_value_addr
, ptr_mode
,
2885 GEN_INT (struct_value_size
),
2886 TYPE_MODE (sizetype
),
2887 GEN_INT (MEMORY_USE_WO
),
2888 TYPE_MODE (integer_type_node
));
2890 if (GET_CODE (struct_value_rtx
) == REG
)
2891 use_reg (&call_fusage
, struct_value_rtx
);
2894 funexp
= prepare_call_address (funexp
, fndecl
, &call_fusage
,
2897 load_register_parameters (args
, num_actuals
, &call_fusage
, flags
);
2899 /* Perform postincrements before actually calling the function. */
2902 /* Save a pointer to the last insn before the call, so that we can
2903 later safely search backwards to find the CALL_INSN. */
2904 before_call
= get_last_insn ();
2906 /* Set up next argument register. For sibling calls on machines
2907 with register windows this should be the incoming register. */
2908 #ifdef FUNCTION_INCOMING_ARG
2910 next_arg_reg
= FUNCTION_INCOMING_ARG (args_so_far
, VOIDmode
,
2914 next_arg_reg
= FUNCTION_ARG (args_so_far
, VOIDmode
,
2917 /* All arguments and registers used for the call must be set up by
2920 #ifdef PREFERRED_STACK_BOUNDARY
2921 /* Stack must be properly aligned now. */
2922 if (pass
&& stack_pointer_delta
% preferred_unit_stack_boundary
)
2926 /* Generate the actual call instruction. */
2927 emit_call_1 (funexp
, fndecl
, funtype
, unadjusted_args_size
,
2928 adjusted_args_size
.constant
, struct_value_size
,
2929 next_arg_reg
, valreg
, old_inhibit_defer_pop
, call_fusage
,
2932 /* Verify that we've deallocated all the stack we used. */
2934 && old_stack_allocated
!= stack_pointer_delta
- pending_stack_adjust
)
2937 /* If call is cse'able, make appropriate pair of reg-notes around it.
2938 Test valreg so we don't crash; may safely ignore `const'
2939 if return type is void. Disable for PARALLEL return values, because
2940 we have no way to move such values into a pseudo register. */
2942 && (flags
& (ECF_CONST
| ECF_PURE
))
2943 && valreg
!= 0 && GET_CODE (valreg
) != PARALLEL
)
2946 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
2949 /* Mark the return value as a pointer if needed. */
2950 if (TREE_CODE (TREE_TYPE (exp
)) == POINTER_TYPE
)
2951 mark_reg_pointer (temp
, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
))));
2953 /* Construct an "equal form" for the value which mentions all the
2954 arguments in order as well as the function name. */
2955 for (i
= 0; i
< num_actuals
; i
++)
2956 note
= gen_rtx_EXPR_LIST (VOIDmode
, args
[i
].initial_value
, note
);
2957 note
= gen_rtx_EXPR_LIST (VOIDmode
, funexp
, note
);
2959 insns
= get_insns ();
2962 if (flags
& ECF_PURE
)
2963 note
= gen_rtx_EXPR_LIST (VOIDmode
,
2964 gen_rtx_USE (VOIDmode
,
2965 gen_rtx_MEM (BLKmode
,
2966 gen_rtx_SCRATCH (VOIDmode
))), note
);
2968 emit_libcall_block (insns
, temp
, valreg
, note
);
2972 else if (flags
& (ECF_CONST
| ECF_PURE
))
2974 /* Otherwise, just write out the sequence without a note. */
2975 rtx insns
= get_insns ();
2980 else if (flags
& ECF_MALLOC
)
2982 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
2985 /* The return value from a malloc-like function is a pointer. */
2986 if (TREE_CODE (TREE_TYPE (exp
)) == POINTER_TYPE
)
2987 mark_reg_pointer (temp
, BIGGEST_ALIGNMENT
);
2989 emit_move_insn (temp
, valreg
);
2991 /* The return value from a malloc-like function can not alias
2993 last
= get_last_insn ();
2995 gen_rtx_EXPR_LIST (REG_NOALIAS
, temp
, REG_NOTES (last
));
2997 /* Write out the sequence. */
2998 insns
= get_insns ();
3004 /* For calls to `setjmp', etc., inform flow.c it should complain
3005 if nonvolatile values are live. For functions that cannot return,
3006 inform flow that control does not fall through. */
3008 if ((flags
& (ECF_RETURNS_TWICE
| ECF_NORETURN
| ECF_LONGJMP
)) || pass
== 0)
3010 /* The barrier or NOTE_INSN_SETJMP note must be emitted
3011 immediately after the CALL_INSN. Some ports emit more
3012 than just a CALL_INSN above, so we must search for it here. */
3014 rtx last
= get_last_insn ();
3015 while (GET_CODE (last
) != CALL_INSN
)
3017 last
= PREV_INSN (last
);
3018 /* There was no CALL_INSN? */
3019 if (last
== before_call
)
3023 if (flags
& ECF_RETURNS_TWICE
)
3025 emit_note_after (NOTE_INSN_SETJMP
, last
);
3026 current_function_calls_setjmp
= 1;
3029 emit_barrier_after (last
);
3032 if (flags
& ECF_LONGJMP
)
3033 current_function_calls_longjmp
= 1;
3035 /* If this function is returning into a memory location marked as
3036 readonly, it means it is initializing that location. But we normally
3037 treat functions as not clobbering such locations, so we need to
3038 specify that this one does. */
3039 if (target
!= 0 && GET_CODE (target
) == MEM
3040 && structure_value_addr
!= 0 && RTX_UNCHANGING_P (target
))
3041 emit_insn (gen_rtx_CLOBBER (VOIDmode
, target
));
3043 /* If value type not void, return an rtx for the value. */
3045 /* If there are cleanups to be called, don't use a hard reg as target.
3046 We need to double check this and see if it matters anymore. */
3047 if (any_pending_cleanups (1))
3049 if (target
&& REG_P (target
)
3050 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
3052 sibcall_failure
= 1;
3055 if (TYPE_MODE (TREE_TYPE (exp
)) == VOIDmode
3058 target
= const0_rtx
;
3060 else if (structure_value_addr
)
3062 if (target
== 0 || GET_CODE (target
) != MEM
)
3065 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp
)),
3066 memory_address (TYPE_MODE (TREE_TYPE (exp
)),
3067 structure_value_addr
));
3068 set_mem_attributes (target
, exp
, 1);
3071 else if (pcc_struct_value
)
3073 /* This is the special C++ case where we need to
3074 know what the true target was. We take care to
3075 never use this value more than once in one expression. */
3076 target
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp
)),
3077 copy_to_reg (valreg
));
3078 set_mem_attributes (target
, exp
, 1);
3080 /* Handle calls that return values in multiple non-contiguous locations.
3081 The Irix 6 ABI has examples of this. */
3082 else if (GET_CODE (valreg
) == PARALLEL
)
3084 int bytes
= int_size_in_bytes (TREE_TYPE (exp
));
3088 target
= assign_stack_temp (TYPE_MODE (TREE_TYPE (exp
)),
3090 MEM_SET_IN_STRUCT_P (target
, AGGREGATE_TYPE_P (TREE_TYPE (exp
)));
3091 preserve_temp_slots (target
);
3094 if (! rtx_equal_p (target
, valreg
))
3095 emit_group_store (target
, valreg
, bytes
,
3096 TYPE_ALIGN (TREE_TYPE (exp
)));
3098 /* We can not support sibling calls for this case. */
3099 sibcall_failure
= 1;
3102 && GET_MODE (target
) == TYPE_MODE (TREE_TYPE (exp
))
3103 && GET_MODE (target
) == GET_MODE (valreg
))
3105 /* TARGET and VALREG cannot be equal at this point because the
3106 latter would not have REG_FUNCTION_VALUE_P true, while the
3107 former would if it were referring to the same register.
3109 If they refer to the same register, this move will be a no-op,
3110 except when function inlining is being done. */
3111 emit_move_insn (target
, valreg
);
3113 else if (TYPE_MODE (TREE_TYPE (exp
)) == BLKmode
)
3114 target
= copy_blkmode_from_reg (target
, valreg
, TREE_TYPE (exp
));
3116 target
= copy_to_reg (valreg
);
3118 #ifdef PROMOTE_FUNCTION_RETURN
3119 /* If we promoted this return value, make the proper SUBREG. TARGET
3120 might be const0_rtx here, so be careful. */
3121 if (GET_CODE (target
) == REG
3122 && TYPE_MODE (TREE_TYPE (exp
)) != BLKmode
3123 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
3125 tree type
= TREE_TYPE (exp
);
3126 int unsignedp
= TREE_UNSIGNED (type
);
3128 /* If we don't promote as expected, something is wrong. */
3129 if (GET_MODE (target
)
3130 != promote_mode (type
, TYPE_MODE (type
), &unsignedp
, 1))
3133 target
= gen_rtx_SUBREG (TYPE_MODE (type
), target
, 0);
3134 SUBREG_PROMOTED_VAR_P (target
) = 1;
3135 SUBREG_PROMOTED_UNSIGNED_P (target
) = unsignedp
;
3139 /* If size of args is variable or this was a constructor call for a stack
3140 argument, restore saved stack-pointer value. */
3142 if (old_stack_level
)
3144 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
3145 pending_stack_adjust
= old_pending_adj
;
3146 stack_arg_under_construction
= old_stack_arg_under_construction
;
3147 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
3148 stack_usage_map
= initial_stack_usage_map
;
3149 sibcall_failure
= 1;
3151 else if (ACCUMULATE_OUTGOING_ARGS
&& pass
)
3153 #ifdef REG_PARM_STACK_SPACE
3156 restore_fixed_argument_area (save_area
, argblock
,
3157 high_to_save
, low_to_save
);
3161 /* If we saved any argument areas, restore them. */
3162 for (i
= 0; i
< num_actuals
; i
++)
3163 if (args
[i
].save_area
)
3165 enum machine_mode save_mode
= GET_MODE (args
[i
].save_area
);
3167 = gen_rtx_MEM (save_mode
,
3168 memory_address (save_mode
,
3169 XEXP (args
[i
].stack_slot
, 0)));
3171 if (save_mode
!= BLKmode
)
3172 emit_move_insn (stack_area
, args
[i
].save_area
);
3174 emit_block_move (stack_area
,
3175 validize_mem (args
[i
].save_area
),
3176 GEN_INT (args
[i
].size
.constant
),
3180 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
3181 stack_usage_map
= initial_stack_usage_map
;
3184 /* If this was alloca, record the new stack level for nonlocal gotos.
3185 Check for the handler slots since we might not have a save area
3186 for non-local gotos. */
3188 if ((flags
& ECF_MAY_BE_ALLOCA
) && nonlocal_goto_handler_slots
!= 0)
3189 emit_stack_save (SAVE_NONLOCAL
, &nonlocal_goto_stack_level
, NULL_RTX
);
3193 /* Free up storage we no longer need. */
3194 for (i
= 0; i
< num_actuals
; ++i
)
3195 if (args
[i
].aligned_regs
)
3196 free (args
[i
].aligned_regs
);
3200 /* Undo the fake expand_start_target_temps we did earlier. If
3201 there had been any cleanups created, we've already set
3203 expand_end_target_temps ();
3206 insns
= get_insns ();
3211 tail_call_insns
= insns
;
3213 /* If something prevents making this a sibling call,
3214 zero out the sequence. */
3215 if (sibcall_failure
)
3216 tail_call_insns
= NULL_RTX
;
3217 /* Restore the pending stack adjustment now that we have
3218 finished generating the sibling call sequence. */
3220 pending_stack_adjust
= save_pending_stack_adjust
;
3221 stack_pointer_delta
= save_stack_pointer_delta
;
3223 /* Prepare arg structure for next iteration. */
3224 for (i
= 0 ; i
< num_actuals
; i
++)
3227 args
[i
].aligned_regs
= 0;
3232 normal_call_insns
= insns
;
3235 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3236 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3237 can happen if the arguments to this function call an inline
3238 function who's expansion contains another CALL_PLACEHOLDER.
3240 If there are any C_Ps in any of these sequences, replace them
3241 with their normal call. */
3243 for (insn
= normal_call_insns
; insn
; insn
= NEXT_INSN (insn
))
3244 if (GET_CODE (insn
) == CALL_INSN
3245 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
3246 replace_call_placeholder (insn
, sibcall_use_normal
);
3248 for (insn
= tail_call_insns
; insn
; insn
= NEXT_INSN (insn
))
3249 if (GET_CODE (insn
) == CALL_INSN
3250 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
3251 replace_call_placeholder (insn
, sibcall_use_normal
);
3253 for (insn
= tail_recursion_insns
; insn
; insn
= NEXT_INSN (insn
))
3254 if (GET_CODE (insn
) == CALL_INSN
3255 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
3256 replace_call_placeholder (insn
, sibcall_use_normal
);
3258 /* If this was a potential tail recursion site, then emit a
3259 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3260 One of them will be selected later. */
3261 if (tail_recursion_insns
|| tail_call_insns
)
3263 /* The tail recursion label must be kept around. We could expose
3264 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3265 and makes determining true tail recursion sites difficult.
3267 So we set LABEL_PRESERVE_P here, then clear it when we select
3268 one of the call sequences after rtl generation is complete. */
3269 if (tail_recursion_insns
)
3270 LABEL_PRESERVE_P (tail_recursion_label
) = 1;
3271 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode
, normal_call_insns
,
3273 tail_recursion_insns
,
3274 tail_recursion_label
));
3277 emit_insns (normal_call_insns
);
3279 currently_expanding_call
--;
3284 /* Returns nonzero if FUN is the symbol for a library function which can
3288 libfunc_nothrow (fun
)
3291 if (fun
== throw_libfunc
3292 || fun
== rethrow_libfunc
3293 || fun
== sjthrow_libfunc
3294 || fun
== sjpopnthrow_libfunc
)
3300 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3301 The RETVAL parameter specifies whether return value needs to be saved, other
3302 parameters are documented in the emit_library_call function bellow. */
3304 emit_library_call_value_1 (retval
, orgfun
, value
, fn_type
, outmode
, nargs
, p
)
3309 enum machine_mode outmode
;
3313 /* Total size in bytes of all the stack-parms scanned so far. */
3314 struct args_size args_size
;
3315 /* Size of arguments before any adjustments (such as rounding). */
3316 struct args_size original_args_size
;
3317 register int argnum
;
3321 struct args_size alignment_pad
;
3323 CUMULATIVE_ARGS args_so_far
;
3324 struct arg
{ rtx value
; enum machine_mode mode
; rtx reg
; int partial
;
3325 struct args_size offset
; struct args_size size
; rtx save_area
; };
3327 int old_inhibit_defer_pop
= inhibit_defer_pop
;
3328 rtx call_fusage
= 0;
3331 int pcc_struct_value
= 0;
3332 int struct_value_size
= 0;
3334 int reg_parm_stack_space
= 0;
3337 #ifdef REG_PARM_STACK_SPACE
3338 /* Define the boundary of the register parm stack space that needs to be
3340 int low_to_save
= -1, high_to_save
= 0;
3341 rtx save_area
= 0; /* Place that it is saved */
3344 /* Size of the stack reserved for parameter registers. */
3345 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
3346 char *initial_stack_usage_map
= stack_usage_map
;
3348 #ifdef REG_PARM_STACK_SPACE
3349 #ifdef MAYBE_REG_PARM_STACK_SPACE
3350 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
3352 reg_parm_stack_space
= REG_PARM_STACK_SPACE ((tree
) 0);
3358 else if (fn_type
== 2)
3362 if (libfunc_nothrow (fun
))
3363 flags
|= ECF_NOTHROW
;
3365 #ifdef PREFERRED_STACK_BOUNDARY
3366 /* Ensure current function's preferred stack boundary is at least
3368 if (cfun
->preferred_stack_boundary
< PREFERRED_STACK_BOUNDARY
)
3369 cfun
->preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
3372 /* If this kind of value comes back in memory,
3373 decide where in memory it should come back. */
3374 if (outmode
!= VOIDmode
&& aggregate_value_p (type_for_mode (outmode
, 0)))
3376 #ifdef PCC_STATIC_STRUCT_RETURN
3378 = hard_function_value (build_pointer_type (type_for_mode (outmode
, 0)),
3380 mem_value
= gen_rtx_MEM (outmode
, pointer_reg
);
3381 pcc_struct_value
= 1;
3383 value
= gen_reg_rtx (outmode
);
3384 #else /* not PCC_STATIC_STRUCT_RETURN */
3385 struct_value_size
= GET_MODE_SIZE (outmode
);
3386 if (value
!= 0 && GET_CODE (value
) == MEM
)
3389 mem_value
= assign_stack_temp (outmode
, GET_MODE_SIZE (outmode
), 0);
3392 /* This call returns a big structure. */
3393 flags
&= ~(ECF_CONST
| ECF_PURE
);
3396 /* ??? Unfinished: must pass the memory address as an argument. */
3398 /* Copy all the libcall-arguments out of the varargs data
3399 and into a vector ARGVEC.
3401 Compute how to pass each argument. We only support a very small subset
3402 of the full argument passing conventions to limit complexity here since
3403 library functions shouldn't have many args. */
3405 argvec
= (struct arg
*) alloca ((nargs
+ 1) * sizeof (struct arg
));
3406 bzero ((char *) argvec
, (nargs
+ 1) * sizeof (struct arg
));
3408 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
, fun
, 0);
3410 args_size
.constant
= 0;
3415 /* Now we are about to start emitting insns that can be deleted
3416 if a libcall is deleted. */
3417 if (flags
& (ECF_CONST
| ECF_PURE
))
3422 /* If there's a structure value address to be passed,
3423 either pass it in the special place, or pass it as an extra argument. */
3424 if (mem_value
&& struct_value_rtx
== 0 && ! pcc_struct_value
)
3426 rtx addr
= XEXP (mem_value
, 0);
3429 /* Make sure it is a reasonable operand for a move or push insn. */
3430 if (GET_CODE (addr
) != REG
&& GET_CODE (addr
) != MEM
3431 && ! (CONSTANT_P (addr
) && LEGITIMATE_CONSTANT_P (addr
)))
3432 addr
= force_operand (addr
, NULL_RTX
);
3434 argvec
[count
].value
= addr
;
3435 argvec
[count
].mode
= Pmode
;
3436 argvec
[count
].partial
= 0;
3438 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, Pmode
, NULL_TREE
, 1);
3439 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3440 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, Pmode
, NULL_TREE
, 1))
3444 locate_and_pad_parm (Pmode
, NULL_TREE
,
3445 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3448 argvec
[count
].reg
!= 0,
3450 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
3451 &argvec
[count
].size
, &alignment_pad
);
3454 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
3455 || reg_parm_stack_space
> 0)
3456 args_size
.constant
+= argvec
[count
].size
.constant
;
3458 FUNCTION_ARG_ADVANCE (args_so_far
, Pmode
, (tree
) 0, 1);
3463 for (; count
< nargs
; count
++)
3465 rtx val
= va_arg (p
, rtx
);
3466 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
3468 /* We cannot convert the arg value to the mode the library wants here;
3469 must do it earlier where we know the signedness of the arg. */
3471 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
3474 /* On some machines, there's no way to pass a float to a library fcn.
3475 Pass it as a double instead. */
3476 #ifdef LIBGCC_NEEDS_DOUBLE
3477 if (LIBGCC_NEEDS_DOUBLE
&& mode
== SFmode
)
3478 val
= convert_modes (DFmode
, SFmode
, val
, 0), mode
= DFmode
;
3481 /* There's no need to call protect_from_queue, because
3482 either emit_move_insn or emit_push_insn will do that. */
3484 /* Make sure it is a reasonable operand for a move or push insn. */
3485 if (GET_CODE (val
) != REG
&& GET_CODE (val
) != MEM
3486 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
3487 val
= force_operand (val
, NULL_RTX
);
3489 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3490 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, mode
, NULL_TREE
, 1))
3492 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3493 be viewed as just an efficiency improvement. */
3494 rtx slot
= assign_stack_temp (mode
, GET_MODE_SIZE (mode
), 0);
3495 emit_move_insn (slot
, val
);
3496 val
= force_operand (XEXP (slot
, 0), NULL_RTX
);
3501 argvec
[count
].value
= val
;
3502 argvec
[count
].mode
= mode
;
3504 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
3506 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3507 argvec
[count
].partial
3508 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
3510 argvec
[count
].partial
= 0;
3513 locate_and_pad_parm (mode
, NULL_TREE
,
3514 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3517 argvec
[count
].reg
!= 0,
3519 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
3520 &argvec
[count
].size
, &alignment_pad
);
3522 if (argvec
[count
].size
.var
)
3525 if (reg_parm_stack_space
== 0 && argvec
[count
].partial
)
3526 argvec
[count
].size
.constant
-= argvec
[count
].partial
* UNITS_PER_WORD
;
3528 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
3529 || reg_parm_stack_space
> 0)
3530 args_size
.constant
+= argvec
[count
].size
.constant
;
3532 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
) 0, 1);
3535 #ifdef FINAL_REG_PARM_STACK_SPACE
3536 reg_parm_stack_space
= FINAL_REG_PARM_STACK_SPACE (args_size
.constant
,
3539 /* If this machine requires an external definition for library
3540 functions, write one out. */
3541 assemble_external_libcall (fun
);
3543 original_args_size
= args_size
;
3544 #ifdef PREFERRED_STACK_BOUNDARY
3545 args_size
.constant
= (((args_size
.constant
3546 + stack_pointer_delta
3550 - stack_pointer_delta
);
3553 args_size
.constant
= MAX (args_size
.constant
,
3554 reg_parm_stack_space
);
3556 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3557 args_size
.constant
-= reg_parm_stack_space
;
3560 if (args_size
.constant
> current_function_outgoing_args_size
)
3561 current_function_outgoing_args_size
= args_size
.constant
;
3563 if (ACCUMULATE_OUTGOING_ARGS
)
3565 /* Since the stack pointer will never be pushed, it is possible for
3566 the evaluation of a parm to clobber something we have already
3567 written to the stack. Since most function calls on RISC machines
3568 do not use the stack, this is uncommon, but must work correctly.
3570 Therefore, we save any area of the stack that was already written
3571 and that we are using. Here we set up to do this by making a new
3572 stack usage map from the old one.
3574 Another approach might be to try to reorder the argument
3575 evaluations to avoid this conflicting stack usage. */
3577 needed
= args_size
.constant
;
3579 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3580 /* Since we will be writing into the entire argument area, the
3581 map must be allocated for its entire size, not just the part that
3582 is the responsibility of the caller. */
3583 needed
+= reg_parm_stack_space
;
3586 #ifdef ARGS_GROW_DOWNWARD
3587 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
3590 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
3593 stack_usage_map
= (char *) alloca (highest_outgoing_arg_in_use
);
3595 if (initial_highest_arg_in_use
)
3596 bcopy (initial_stack_usage_map
, stack_usage_map
,
3597 initial_highest_arg_in_use
);
3599 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
3600 bzero (&stack_usage_map
[initial_highest_arg_in_use
],
3601 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
3604 /* The address of the outgoing argument list must not be copied to a
3605 register here, because argblock would be left pointing to the
3606 wrong place after the call to allocate_dynamic_stack_space below.
3609 argblock
= virtual_outgoing_args_rtx
;
3614 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
3617 #ifdef PREFERRED_STACK_BOUNDARY
3618 /* If we push args individually in reverse order, perform stack alignment
3619 before the first push (the last arg). */
3620 if (argblock
== 0 && PUSH_ARGS_REVERSED
)
3621 anti_adjust_stack (GEN_INT (args_size
.constant
3622 - original_args_size
.constant
));
3625 if (PUSH_ARGS_REVERSED
)
3636 #ifdef REG_PARM_STACK_SPACE
3637 if (ACCUMULATE_OUTGOING_ARGS
)
3639 /* The argument list is the property of the called routine and it
3640 may clobber it. If the fixed area has been used for previous
3641 parameters, we must save and restore it.
3643 Here we compute the boundary of the that needs to be saved, if any. */
3645 #ifdef ARGS_GROW_DOWNWARD
3646 for (count
= 0; count
< reg_parm_stack_space
+ 1; count
++)
3648 for (count
= 0; count
< reg_parm_stack_space
; count
++)
3651 if (count
>= highest_outgoing_arg_in_use
3652 || stack_usage_map
[count
] == 0)
3655 if (low_to_save
== -1)
3656 low_to_save
= count
;
3658 high_to_save
= count
;
3661 if (low_to_save
>= 0)
3663 int num_to_save
= high_to_save
- low_to_save
+ 1;
3664 enum machine_mode save_mode
3665 = mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
3668 /* If we don't have the required alignment, must do this in BLKmode. */
3669 if ((low_to_save
& (MIN (GET_MODE_SIZE (save_mode
),
3670 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
3671 save_mode
= BLKmode
;
3673 #ifdef ARGS_GROW_DOWNWARD
3674 stack_area
= gen_rtx_MEM (save_mode
,
3675 memory_address (save_mode
,
3676 plus_constant (argblock
,
3679 stack_area
= gen_rtx_MEM (save_mode
,
3680 memory_address (save_mode
,
3681 plus_constant (argblock
,
3684 if (save_mode
== BLKmode
)
3686 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
3687 emit_block_move (validize_mem (save_area
), stack_area
,
3688 GEN_INT (num_to_save
), PARM_BOUNDARY
);
3692 save_area
= gen_reg_rtx (save_mode
);
3693 emit_move_insn (save_area
, stack_area
);
3699 /* Push the args that need to be pushed. */
3701 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3702 are to be pushed. */
3703 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
        {
          if (ACCUMULATE_OUTGOING_ARGS)
            {
              /* If this is being stored into a pre-allocated, fixed-size,
                 stack area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
              /* stack_slot is negative, but we want to index stack_usage_map
                 with positive values.  */
              upper_bound = -argvec[argnum].offset.constant + 1;
              lower_bound = upper_bound - argvec[argnum].size.constant;
#else
              lower_bound = argvec[argnum].offset.constant;
              upper_bound = lower_bound + argvec[argnum].size.constant;
#endif
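              /* E.g., an argument with offset.constant 4 and
                 size.constant 8 gives lower_bound 4 and upper_bound 12
                 when args grow upward; the scan below then checks map
                 bytes 4..11.  */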
              for (i = lower_bound; i < upper_bound; i++)
                if (stack_usage_map[i]
                    /* Don't store things in the fixed argument area at this
                       point; it has already been saved.  */
                    && i > reg_parm_stack_space)
                  break;

              if (i != upper_bound)
                {
                  /* We need to make a save area.  See what mode we can make
                     it.  */
                  enum machine_mode save_mode
                    = mode_for_size (argvec[argnum].size.constant
                                     * BITS_PER_UNIT, MODE_INT, 1);
                  rtx stack_area
                    = gen_rtx_MEM (save_mode,
                                   memory_address (save_mode,
                                                   plus_constant (argblock,
                                                                  argvec[argnum].offset.constant)));
                  argvec[argnum].save_area = gen_reg_rtx (save_mode);

                  emit_move_insn (argvec[argnum].save_area, stack_area);
                }
            }
          emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
                          argblock, GEN_INT (argvec[argnum].offset.constant),
                          reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));

          /* Now mark the segment we just used.  */
          if (ACCUMULATE_OUTGOING_ARGS)
            for (i = lower_bound; i < upper_bound; i++)
              stack_usage_map[i] = 1;

          NO_DEFER_POP;
        }
    }
#ifdef PREFERRED_STACK_BOUNDARY
  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));
#endif

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val,
                         GET_MODE_SIZE (GET_MODE (val)),
                         GET_MODE_ALIGNMENT (GET_MODE (val)));
      else if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);

      NO_DEFER_POP;
    }
  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        use_reg (&call_fusage, reg);
    }
  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value_rtx,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
    }
  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode) : NULL_RTX);

#ifdef PREFERRED_STACK_BOUNDARY
  /* Stack must be properly aligned now.  */
  if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
    abort ();
#endif
  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (outmode == VOIDmode ? void_type_node
                                    : type_for_mode (outmode, 0), NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags);
  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if ((flags & (ECF_CONST | ECF_PURE))
      && valreg != 0 && GET_CODE (valreg) != PARALLEL)
    {
      rtx note = 0;
      rtx temp = gen_reg_rtx (GET_MODE (valreg));
      rtx insns;
      int i;

      /* Construct an "equal form" for the value which mentions all the
         arguments in order as well as the function name.  */
      for (i = 0; i < nargs; i++)
        note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
      note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
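      /* E.g., with two arguments the note built above has the shape
             (expr_list FUN (expr_list ARG1 (expr_list ARG0 nil)))
         and is attached by emit_libcall_block below as the REG_EQUAL
         form for the copy into TEMP.  */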
      insns = get_insns ();
      end_sequence ();

      if (flags & ECF_PURE)
        note = gen_rtx_EXPR_LIST (VOIDmode,
                gen_rtx_USE (VOIDmode,
                             gen_rtx_MEM (BLKmode,
                                          gen_rtx_SCRATCH (VOIDmode))), note);

      emit_libcall_block (insns, temp, valreg, note);

      valreg = temp;
    }
  else if (flags & (ECF_CONST | ECF_PURE))
    {
      /* Otherwise, just write out the sequence without a note.  */
      rtx insns = get_insns ();

      end_sequence ();
      emit_insns (insns);
    }
  pop_temp_slots ();
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (value != 0)
        emit_move_insn (value, hard_libcall_value (outmode));
      else
        value = hard_libcall_value (outmode);
    }
  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        {
          enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
          rtx stack_area
            = gen_rtx_MEM (save_mode,
                           memory_address (save_mode,
                                           plus_constant (argblock,
                                                          - high_to_save)));
#else
          rtx stack_area
            = gen_rtx_MEM (save_mode,
                           memory_address (save_mode,
                                           plus_constant (argblock, low_to_save)));
#endif
          if (save_mode != BLKmode)
            emit_move_insn (stack_area, save_area);
          else
            emit_block_move (stack_area, validize_mem (save_area),
                             GEN_INT (high_to_save - low_to_save + 1),
                             PARM_BOUNDARY);
        }
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx stack_area
              = gen_rtx_MEM (save_mode,
                             memory_address (save_mode,
                                             plus_constant (argblock,
                                                            argvec[count].offset.constant)));

            emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   FN_TYPE is zero for `normal' calls, one for `const' calls, which will
   be enclosed in REG_LIBCALL/REG_RETVAL notes, and two for `pure' calls,
   which are handled like `const' calls with an extra
   (use (memory (scratch))).  */
void
emit_library_call VPARAMS((rtx orgfun, int fn_type, enum machine_mode outmode,
                           int nargs, ...))
{
#ifndef ANSI_PROTOTYPES
  rtx orgfun;
  int fn_type;
  enum machine_mode outmode;
  int nargs;
#endif
  va_list p;

  VA_START (p, nargs);

#ifndef ANSI_PROTOTYPES
  orgfun = va_arg (p, rtx);
  fn_type = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);
#endif

  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);

  va_end (p);
}
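/* Illustrative use (a sketch, not a call made here): clearing a block
   of memory via the memset libfunc passes each value rtx followed by
   its machine mode, roughly

     emit_library_call (memset_libfunc, 0, VOIDmode, 3,
                        addr, Pmode,
                        const0_rtx, TYPE_MODE (integer_type_node),
                        size, TYPE_MODE (sizetype));

   where ADDR and SIZE are rtx values prepared by the caller.  */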
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */
rtx
emit_library_call_value VPARAMS((rtx orgfun, rtx value, int fn_type,
                                 enum machine_mode outmode, int nargs, ...))
{
#ifndef ANSI_PROTOTYPES
  rtx orgfun;
  rtx value;
  int fn_type;
  enum machine_mode outmode;
  int nargs;
#endif
  va_list p;

  VA_START (p, nargs);

#ifndef ANSI_PROTOTYPES
  orgfun = va_arg (p, rtx);
  value = va_arg (p, rtx);
  fn_type = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);
#endif

  value = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                     nargs, p);

  va_end (p);

  return value;
}
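/* Illustrative use (a sketch after the pattern of two-operand libcalls
   in optabs): requesting the result in a fresh pseudo might look like

     rtx result = emit_library_call_value (libfunc, NULL_RTX, 1, SFmode,
                                           2, op0, SFmode, op1, SFmode);

   Passing NULL_RTX for VALUE lets this function choose the home.  */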
/* Return an rtx which represents a suitable home on the stack
   given TYPE, the type of the argument looking for a home.
   This is called only for BLKmode arguments.

   SIZE is the size needed for this target.
   ARGS_ADDR is the address of the bottom of the argument block for this call.
   OFFSET describes this parameter's offset into ARGS_ADDR.  It is meaningless
   if this machine uses push insns.  */

static rtx
target_for_arg (type, size, args_addr, offset)
     tree type;
     rtx size;
     rtx args_addr;
     struct args_size offset;
{
  rtx target;
  rtx offset_rtx = ARGS_SIZE_RTX (offset);

  /* We do not call memory_address if possible,
     because we want to address as close to the stack
     as possible.  For non-variable sized arguments,
     this will be stack-pointer relative addressing.  */
  if (GET_CODE (offset_rtx) == CONST_INT)
    target = plus_constant (args_addr, INTVAL (offset_rtx));
  else
    {
      /* I have no idea how to guarantee that this
         will work in the presence of register parameters.  */
      target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
      target = memory_address (QImode, target);
    }

  return gen_rtx_MEM (BLKmode, target);
}
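/* E.g., with a CONST_INT offset of 16 the result is
   (mem:BLK (plus ARGS_ADDR (const_int 16))), keeping the address in
   stack-pointer-relative form instead of forcing it into a register.  */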
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS with ECF_MAY_BE_ALLOCA set says this could be a call to `alloca',
   so we must be careful about how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS to indicate
   that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the fixed area at the base of the
   argument list that is passed in registers; stores below that offset
   need not be saved here because the fixed area has been saved already.  */
static void
store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
     struct arg_data *arg;
     rtx argblock;
     int flags;
     int variable_size ATTRIBUTE_UNUSED;
     int reg_parm_stack_space;
{
  register tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->size.constant;
#endif
          for (i = lower_bound; i < upper_bound; i++)
            if (stack_usage_map[i]
                /* Don't store things in the fixed argument area at this point;
                   it has already been saved.  */
                && i > reg_parm_stack_space)
              break;

          if (i != upper_bound)
            {
              /* We need to make a save area.  See what mode we can make it.  */
              enum machine_mode save_mode
                = mode_for_size (arg->size.constant * BITS_PER_UNIT,
                                 MODE_INT, 1);
              rtx stack_area
                = gen_rtx_MEM (save_mode,
                               memory_address (save_mode,
                                               XEXP (arg->stack_slot, 0)));

              if (save_mode == BLKmode)
                {
                  arg->save_area = assign_stack_temp (BLKmode,
                                                      arg->size.constant, 0);
                  MEM_SET_IN_STRUCT_P (arg->save_area,
                                       AGGREGATE_TYPE_P (TREE_TYPE
                                                         (arg->tree_value)));
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   GEN_INT (arg->size.constant),
                                   PARM_BOUNDARY);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
      /* Now that we have saved any slots that will be overwritten by this
         store, mark all slots this store will use.  We must do this before
         we actually expand the argument since the expansion itself may
         trigger library calls which might need to use the same stack slot.  */
      if (argblock && ! variable_size && arg->stack)
        for (i = lower_bound; i < upper_bound; i++)
          stack_usage_map[i] = 1;
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    reg = arg->reg, partial = arg->partial;

  if (reg != 0 && partial == 0)
    /* Being passed entirely in a register.  We shouldn't be called in
       this case.  */
    abort ();

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */
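      /* For illustration (example invented):

             struct big { int x[8]; };
             struct big make_big ();
             ...
             use_big (make_big ());

         make_big's BLKmode return value is built directly in the outgoing
         argument slot, so any call emitted while it runs must have the
         stack pushed around it.  */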
      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, 0);

      /* If we are promoting the object (or for any other reason the mode
         doesn't agree), convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }
  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    {
      /* If the value is already in the stack slot, we are done.  */
      if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
        {
          emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                             XEXP (arg->stack, 0), Pmode,
                             ARGS_SIZE_RTX (arg->size),
                             TYPE_MODE (sizetype),
                             GEN_INT (MEMORY_USE_RW),
                             TYPE_MODE (integer_type_node));
        }
    }
  else if (arg->mode != BLKmode)
    {
      register int size;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
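      /* E.g., an HImode argument (size 2) with a 32-bit PARM_BOUNDARY
         rounds used up to 4, so emit_push_insn below is passed
         used - size == 2 bytes of extra space to allow for.  */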
      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
                      partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->alignment_pad));
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      register int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because
             emit_push_insn for BLKmode is careful to avoid it.  */
          excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
                    + partial * UNITS_PER_WORD);
          size_rtx = expr_size (pval);
        }
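      /* E.g., a 10-byte struct whose stack slot was rounded up to 12
         bytes, with no part passed in registers, gives excess == 2
         bytes of padding for emit_push_insn to account for.  */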
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      TYPE_ALIGN (TREE_TYPE (pval)), partial, reg, excess,
                      argblock, ARGS_SIZE_RTX (arg->offset),
                      reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->alignment_pad));
    }
  /* Unless this is a partially-in-register argument, the argument is now
     in the stack.

     ??? Note that this can change arg->value from arg->stack to
     arg->stack_slot and it matters when they are not the same.
     It isn't totally clear that this is correct in all cases.  */
  if (partial == 0)
    arg->value = arg->stack_slot;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* ANSI doesn't require a sequence point here,
     but PCC has one, so this will avoid some problems.  */
  emit_queue ();

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();
}