1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option) any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
29 #include "insn-flags.h"
34 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
35 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
38 /* Decide whether a function's arguments should be processed
39 from first to last or from last to first.
41 They should if the stack and args grow in opposite directions, but
42 only if we have push insns. */
46 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
47 #define PUSH_ARGS_REVERSED /* If it's last to first */
52 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
53 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
55 /* Data structure and subroutines used within expand_call. */
59 /* Tree node for this argument. */
61 /* Mode for value; TYPE_MODE unless promoted. */
62 enum machine_mode mode
;
63 /* Current RTL value for argument, or 0 if it isn't precomputed. */
65 /* Initially-computed RTL value for argument; only for const functions. */
67 /* Register to pass this argument in, 0 if passed on stack, or an
68 PARALLEL if the arg is to be copied into multiple non-contiguous
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
77 /* Non-zero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Offset of this argument from beginning of stack-args. */
83 struct args_size offset
;
84 /* Similar, but offset to the start of the stack slot. Different from
85 OFFSET if this arg pads downward. */
86 struct args_size slot_offset
;
87 /* Size of this argument on the stack, rounded up for any padding it gets,
88 parts of the argument passed in registers do not count.
89 If REG_PARM_STACK_SPACE is defined, then register parms
90 are counted here as well. */
91 struct args_size size
;
92 /* Location on the stack at which parameter should be stored. The store
93 has already been done if STACK == VALUE. */
95 /* Location on the stack of the start of this argument slot. This can
96 differ from STACK if this arg pads downward. This location is known
97 to be aligned to FUNCTION_ARG_BOUNDARY. */
99 #ifdef ACCUMULATE_OUTGOING_ARGS
100 /* Place that this stack area has been saved, if needed. */
103 /* If an argument's alignment does not permit direct copying into registers,
104 copy in smaller-sized pieces into pseudos. These are stored in a
105 block pointed to by this field. The next field says how many
106 word-sized pseudos we made. */
109 /* The amount that the stack pointer needs to be adjusted to
110 force alignment for the next argument. */
111 struct args_size alignment_pad
;
114 #ifdef ACCUMULATE_OUTGOING_ARGS
115 /* A vector of one char per byte of stack space. A byte is non-zero if
116 the corresponding stack location has been used.
117 This vector is used to prevent a function call within an argument from
118 clobbering any stack already set up. */
119 static char *stack_usage_map
;
121 /* Size of STACK_USAGE_MAP. */
122 static int highest_outgoing_arg_in_use
;
124 /* stack_arg_under_construction is nonzero when an argument may be
125 initialized with a constructor call (including a C function that
126 returns a BLKmode struct) and expand_call must take special action
127 to make sure the object being constructed does not overlap the
128 argument list for the constructor call. */
129 int stack_arg_under_construction
;
132 static int calls_function
PROTO ((tree
, int));
133 static int calls_function_1
PROTO ((tree
, int));
134 static void emit_call_1
PROTO ((rtx
, tree
, tree
, HOST_WIDE_INT
,
135 HOST_WIDE_INT
, HOST_WIDE_INT
, rtx
,
136 rtx
, int, rtx
, int));
137 static void precompute_register_parameters
PROTO ((int, struct arg_data
*,
139 static void store_one_arg
PROTO ((struct arg_data
*, rtx
, int, int,
141 static void store_unaligned_arguments_into_pseudos
PROTO ((struct arg_data
*,
143 static int finalize_must_preallocate
PROTO ((int, int,
145 struct args_size
*));
146 static void precompute_arguments
PROTO ((int, int, int,
148 struct args_size
*));
149 static int compute_argument_block_size
PROTO ((int,
150 struct args_size
*));
151 static void initialize_argument_information
PROTO ((int,
158 static void compute_argument_addresses
PROTO ((struct arg_data
*,
160 static rtx rtx_for_function_call
PROTO ((tree
, tree
));
161 static void load_register_parameters
PROTO ((struct arg_data
*,
164 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
165 static rtx save_fixed_argument_area
PROTO ((int, rtx
, int *, int *));
166 static void restore_fixed_argument_area
PROTO ((rtx
, rtx
, int, int));
169 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
172 If WHICH is 0, return 1 if EXP contains a call to any function.
173 Actually, we only need return 1 if evaluating EXP would require pushing
174 arguments on the stack, but that is too difficult to compute, so we just
175 assume any function call might require the stack. */
177 static tree calls_function_save_exprs
;
180 calls_function (exp
, which
)
185 calls_function_save_exprs
= 0;
186 val
= calls_function_1 (exp
, which
);
187 calls_function_save_exprs
= 0;
192 calls_function_1 (exp
, which
)
197 enum tree_code code
= TREE_CODE (exp
);
198 int type
= TREE_CODE_CLASS (code
);
199 int length
= tree_code_length
[(int) code
];
201 /* If this code is language-specific, we don't know what it will do. */
202 if ((int) code
>= NUM_TREE_CODES
)
205 /* Only expressions and references can contain calls. */
206 if (type
!= 'e' && type
!= '<' && type
!= '1' && type
!= '2' && type
!= 'r'
215 else if (TREE_CODE (TREE_OPERAND (exp
, 0)) == ADDR_EXPR
216 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
219 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
221 if ((DECL_BUILT_IN (fndecl
)
222 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
223 && DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
)
224 || (DECL_SAVED_INSNS (fndecl
)
225 && DECL_SAVED_INSNS (fndecl
)->calls_alloca
))
229 /* Third operand is RTL. */
234 if (SAVE_EXPR_RTL (exp
) != 0)
236 if (value_member (exp
, calls_function_save_exprs
))
238 calls_function_save_exprs
= tree_cons (NULL_TREE
, exp
,
239 calls_function_save_exprs
);
240 return (TREE_OPERAND (exp
, 0) != 0
241 && calls_function_1 (TREE_OPERAND (exp
, 0), which
));
247 for (local
= BLOCK_VARS (exp
); local
; local
= TREE_CHAIN (local
))
248 if (DECL_INITIAL (local
) != 0
249 && calls_function_1 (DECL_INITIAL (local
), which
))
253 register tree subblock
;
255 for (subblock
= BLOCK_SUBBLOCKS (exp
);
257 subblock
= TREE_CHAIN (subblock
))
258 if (calls_function_1 (subblock
, which
))
263 case METHOD_CALL_EXPR
:
267 case WITH_CLEANUP_EXPR
:
278 for (i
= 0; i
< length
; i
++)
279 if (TREE_OPERAND (exp
, i
) != 0
280 && calls_function_1 (TREE_OPERAND (exp
, i
), which
))
286 /* Force FUNEXP into a form suitable for the address of a CALL,
287 and return that as an rtx. Also load the static chain register
288 if FNDECL is a nested function.
290 CALL_FUSAGE points to a variable holding the prospective
291 CALL_INSN_FUNCTION_USAGE information. */
294 prepare_call_address (funexp
, fndecl
, call_fusage
, reg_parm_seen
)
300 rtx static_chain_value
= 0;
302 funexp
= protect_from_queue (funexp
, 0);
305 /* Get possible static chain value for nested function in C. */
306 static_chain_value
= lookup_static_chain (fndecl
);
308 /* Make a valid memory address and copy constants thru pseudo-regs,
309 but not for a constant address if -fno-function-cse. */
310 if (GET_CODE (funexp
) != SYMBOL_REF
)
311 /* If we are using registers for parameters, force the
312 function address into a register now. */
313 funexp
= ((SMALL_REGISTER_CLASSES
&& reg_parm_seen
)
314 ? force_not_mem (memory_address (FUNCTION_MODE
, funexp
))
315 : memory_address (FUNCTION_MODE
, funexp
));
318 #ifndef NO_FUNCTION_CSE
319 if (optimize
&& ! flag_no_function_cse
)
320 #ifdef NO_RECURSIVE_FUNCTION_CSE
321 if (fndecl
!= current_function_decl
)
323 funexp
= force_reg (Pmode
, funexp
);
327 if (static_chain_value
!= 0)
329 emit_move_insn (static_chain_rtx
, static_chain_value
);
331 if (GET_CODE (static_chain_rtx
) == REG
)
332 use_reg (call_fusage
, static_chain_rtx
);
338 /* Generate instructions to call function FUNEXP,
339 and optionally pop the results.
340 The CALL_INSN is the first insn generated.
342 FNDECL is the declaration node of the function. This is given to the
343 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
345 FUNTYPE is the data type of the function. This is given to the macro
346 RETURN_POPS_ARGS to determine whether this function pops its own args.
347 We used to allow an identifier for library functions, but that doesn't
348 work when the return type is an aggregate type and the calling convention
349 says that the pointer to this aggregate is to be popped by the callee.
351 STACK_SIZE is the number of bytes of arguments on the stack,
352 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
353 This is both to put into the call insn and
354 to generate explicit popping code if necessary.
356 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
357 It is zero if this call doesn't want a structure value.
359 NEXT_ARG_REG is the rtx that results from executing
360 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
361 just after all the args have had their registers assigned.
362 This could be whatever you like, but normally it is the first
363 arg-register beyond those used for args in this call,
364 or 0 if all the arg-registers are used in this call.
365 It is passed on to `gen_call' so you can put this info in the call insn.
367 VALREG is a hard register in which a value is returned,
368 or 0 if the call does not return a value.
370 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
371 the args to this call were processed.
372 We restore `inhibit_defer_pop' to that value.
374 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
375 denote registers used by the called function.
377 IS_CONST is true if this is a `const' call. */
380 emit_call_1 (funexp
, fndecl
, funtype
, stack_size
, rounded_stack_size
,
381 struct_value_size
, next_arg_reg
, valreg
, old_inhibit_defer_pop
,
382 call_fusage
, is_const
)
384 tree fndecl ATTRIBUTE_UNUSED
;
385 tree funtype ATTRIBUTE_UNUSED
;
386 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED
;
387 HOST_WIDE_INT rounded_stack_size
;
388 HOST_WIDE_INT struct_value_size
;
391 int old_inhibit_defer_pop
;
395 rtx rounded_stack_size_rtx
= GEN_INT (rounded_stack_size
);
396 rtx struct_value_size_rtx
= GEN_INT (struct_value_size
);
398 #ifndef ACCUMULATE_OUTGOING_ARGS
399 int already_popped
= 0;
400 HOST_WIDE_INT n_popped
= RETURN_POPS_ARGS (fndecl
, funtype
, stack_size
);
403 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
404 and we don't want to load it into a register as an optimization,
405 because prepare_call_address already did it if it should be done. */
406 if (GET_CODE (funexp
) != SYMBOL_REF
)
407 funexp
= memory_address (FUNCTION_MODE
, funexp
);
409 #ifndef ACCUMULATE_OUTGOING_ARGS
410 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
411 /* If the target has "call" or "call_value" insns, then prefer them
412 if no arguments are actually popped. If the target does not have
413 "call" or "call_value" insns, then we must use the popping versions
414 even if the call has no arguments to pop. */
415 #if defined (HAVE_call) && defined (HAVE_call_value)
416 if (HAVE_call
&& HAVE_call_value
&& HAVE_call_pop
&& HAVE_call_value_pop
419 if (HAVE_call_pop
&& HAVE_call_value_pop
)
422 rtx n_pop
= GEN_INT (n_popped
);
425 /* If this subroutine pops its own args, record that in the call insn
426 if possible, for the sake of frame pointer elimination. */
429 pat
= gen_call_value_pop (valreg
,
430 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
431 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
433 pat
= gen_call_pop (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
434 rounded_stack_size_rtx
, next_arg_reg
, n_pop
);
436 emit_call_insn (pat
);
443 #if defined (HAVE_call) && defined (HAVE_call_value)
444 if (HAVE_call
&& HAVE_call_value
)
447 emit_call_insn (gen_call_value (valreg
,
448 gen_rtx_MEM (FUNCTION_MODE
, funexp
),
449 rounded_stack_size_rtx
, next_arg_reg
,
452 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE
, funexp
),
453 rounded_stack_size_rtx
, next_arg_reg
,
454 struct_value_size_rtx
));
460 /* Find the CALL insn we just emitted. */
461 for (call_insn
= get_last_insn ();
462 call_insn
&& GET_CODE (call_insn
) != CALL_INSN
;
463 call_insn
= PREV_INSN (call_insn
))
469 /* Put the register usage information on the CALL. If there is already
470 some usage information, put ours at the end. */
471 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
475 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
476 link
= XEXP (link
, 1))
479 XEXP (link
, 1) = call_fusage
;
482 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
484 /* If this is a const call, then set the insn's unchanging bit. */
486 CONST_CALL_P (call_insn
) = 1;
488 /* Restore this now, so that we do defer pops for this call's args
489 if the context of the call as a whole permits. */
490 inhibit_defer_pop
= old_inhibit_defer_pop
;
492 #ifndef ACCUMULATE_OUTGOING_ARGS
493 /* If returning from the subroutine does not automatically pop the args,
494 we need an instruction to pop them sooner or later.
495 Perhaps do it now; perhaps just record how much space to pop later.
497 If returning from the subroutine does pop the args, indicate that the
498 stack pointer will be changed. */
503 CALL_INSN_FUNCTION_USAGE (call_insn
)
504 = gen_rtx_EXPR_LIST (VOIDmode
,
505 gen_rtx_CLOBBER (VOIDmode
, stack_pointer_rtx
),
506 CALL_INSN_FUNCTION_USAGE (call_insn
));
507 rounded_stack_size
-= n_popped
;
508 rounded_stack_size_rtx
= GEN_INT (rounded_stack_size
);
511 if (rounded_stack_size
!= 0)
513 if (flag_defer_pop
&& inhibit_defer_pop
== 0 && !is_const
)
514 pending_stack_adjust
+= rounded_stack_size
;
516 adjust_stack (rounded_stack_size_rtx
);
521 /* Determine if the function identified by NAME and FNDECL is one with
522 special properties we wish to know about.
524 For example, if the function might return more than one time (setjmp), then
525 set RETURNS_TWICE to a nonzero value.
527 Similarly set IS_LONGJMP for if the function is in the longjmp family.
529 Set IS_MALLOC for any of the standard memory allocation functions which
530 allocate from the heap.
532 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
533 space from the stack such as alloca. */
536 special_function_p (fndecl
, returns_twice
, is_longjmp
,
537 is_malloc
, may_be_alloca
)
548 /* The function decl may have the `malloc' attribute. */
549 *is_malloc
= fndecl
&& DECL_IS_MALLOC (fndecl
);
552 && fndecl
&& DECL_NAME (fndecl
)
553 && IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) <= 17
554 /* Exclude functions not at the file scope, or not `extern',
555 since they are not the magic functions we would otherwise
557 && DECL_CONTEXT (fndecl
) == NULL_TREE
&& TREE_PUBLIC (fndecl
))
559 char *name
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
562 /* We assume that alloca will always be called by name. It
563 makes no sense to pass it as a pointer-to-function to
564 anything that does not understand its behavior. */
566 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) == 6
568 && ! strcmp (name
, "alloca"))
569 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl
)) == 16
571 && ! strcmp (name
, "__builtin_alloca"))));
573 /* Disregard prefix _, __ or __x. */
576 if (name
[1] == '_' && name
[2] == 'x')
578 else if (name
[1] == '_')
588 && (! strcmp (tname
, "setjmp")
589 || ! strcmp (tname
, "setjmp_syscall")))
591 && ! strcmp (tname
, "sigsetjmp"))
593 && ! strcmp (tname
, "savectx")));
595 && ! strcmp (tname
, "siglongjmp"))
598 else if ((tname
[0] == 'q' && tname
[1] == 's'
599 && ! strcmp (tname
, "qsetjmp"))
600 || (tname
[0] == 'v' && tname
[1] == 'f'
601 && ! strcmp (tname
, "vfork")))
604 else if (tname
[0] == 'l' && tname
[1] == 'o'
605 && ! strcmp (tname
, "longjmp"))
607 /* Do not add any more malloc-like functions to this list,
608 instead mark them as malloc functions using the malloc attribute.
609 Note, realloc is not suitable for attribute malloc since
610 it may return the same address across multiple calls. */
611 else if (! strcmp (tname
, "malloc")
612 || ! strcmp (tname
, "calloc")
613 || ! strcmp (tname
, "strdup")
614 /* Note use of NAME rather than TNAME here. These functions
615 are only reserved when preceded with __. */
616 || ! strcmp (name
, "__vn") /* mangled __builtin_vec_new */
617 || ! strcmp (name
, "__nw") /* mangled __builtin_new */
618 || ! strcmp (name
, "__builtin_new")
619 || ! strcmp (name
, "__builtin_vec_new"))
624 /* Precompute all register parameters as described by ARGS, storing values
625 into fields within the ARGS array.
627 NUM_ACTUALS indicates the total number elements in the ARGS array.
629 Set REG_PARM_SEEN if we encounter a register parameter. */
632 precompute_register_parameters (num_actuals
, args
, reg_parm_seen
)
634 struct arg_data
*args
;
641 for (i
= 0; i
< num_actuals
; i
++)
642 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
)
646 if (args
[i
].value
== 0)
649 args
[i
].value
= expand_expr (args
[i
].tree_value
, NULL_RTX
,
651 preserve_temp_slots (args
[i
].value
);
654 /* ANSI doesn't require a sequence point here,
655 but PCC has one, so this will avoid some problems. */
659 /* If we are to promote the function arg to a wider mode,
662 if (args
[i
].mode
!= TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)))
664 = convert_modes (args
[i
].mode
,
665 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
666 args
[i
].value
, args
[i
].unsignedp
);
668 /* If the value is expensive, and we are inside an appropriately
669 short loop, put the value into a pseudo and then put the pseudo
672 For small register classes, also do this if this call uses
673 register parameters. This is to avoid reload conflicts while
674 loading the parameter registers. */
676 if ((! (GET_CODE (args
[i
].value
) == REG
677 || (GET_CODE (args
[i
].value
) == SUBREG
678 && GET_CODE (SUBREG_REG (args
[i
].value
)) == REG
)))
679 && args
[i
].mode
!= BLKmode
680 && rtx_cost (args
[i
].value
, SET
) > 2
681 && ((SMALL_REGISTER_CLASSES
&& *reg_parm_seen
)
682 || preserve_subexpressions_p ()))
683 args
[i
].value
= copy_to_mode_reg (args
[i
].mode
, args
[i
].value
);
687 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
689 /* The argument list is the property of the called routine and it
690 may clobber it. If the fixed area has been used for previous
691 parameters, we must save and restore it. */
693 save_fixed_argument_area (reg_parm_stack_space
, argblock
,
694 low_to_save
, high_to_save
)
695 int reg_parm_stack_space
;
701 rtx save_area
= NULL_RTX
;
703 /* Compute the boundary of the area that needs to be saved, if any. */
704 #ifdef ARGS_GROW_DOWNWARD
705 for (i
= 0; i
< reg_parm_stack_space
+ 1; i
++)
707 for (i
= 0; i
< reg_parm_stack_space
; i
++)
710 if (i
>= highest_outgoing_arg_in_use
711 || stack_usage_map
[i
] == 0)
714 if (*low_to_save
== -1)
720 if (*low_to_save
>= 0)
722 int num_to_save
= *high_to_save
- *low_to_save
+ 1;
723 enum machine_mode save_mode
724 = mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
727 /* If we don't have the required alignment, must do this in BLKmode. */
728 if ((*low_to_save
& (MIN (GET_MODE_SIZE (save_mode
),
729 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
732 #ifdef ARGS_GROW_DOWNWARD
733 stack_area
= gen_rtx_MEM (save_mode
,
734 memory_address (save_mode
,
735 plus_constant (argblock
,
738 stack_area
= gen_rtx_MEM (save_mode
,
739 memory_address (save_mode
,
740 plus_constant (argblock
,
743 if (save_mode
== BLKmode
)
745 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
746 /* Cannot use emit_block_move here because it can be done by a library
747 call which in turn gets into this place again and deadly infinite
748 recursion happens. */
749 move_by_pieces (validize_mem (save_area
), stack_area
, num_to_save
,
750 PARM_BOUNDARY
/ BITS_PER_UNIT
);
754 save_area
= gen_reg_rtx (save_mode
);
755 emit_move_insn (save_area
, stack_area
);
762 restore_fixed_argument_area (save_area
, argblock
, high_to_save
, low_to_save
)
768 enum machine_mode save_mode
= GET_MODE (save_area
);
769 #ifdef ARGS_GROW_DOWNWARD
771 = gen_rtx_MEM (save_mode
,
772 memory_address (save_mode
,
773 plus_constant (argblock
,
777 = gen_rtx_MEM (save_mode
,
778 memory_address (save_mode
,
779 plus_constant (argblock
,
783 if (save_mode
!= BLKmode
)
784 emit_move_insn (stack_area
, save_area
);
786 /* Cannot use emit_block_move here because it can be done by a library
787 call which in turn gets into this place again and deadly infinite
788 recursion happens. */
789 move_by_pieces (stack_area
, validize_mem (save_area
),
790 high_to_save
- low_to_save
+ 1,
791 PARM_BOUNDARY
/ BITS_PER_UNIT
);
795 /* If any elements in ARGS refer to parameters that are to be passed in
796 registers, but not in memory, and whose alignment does not permit a
797 direct copy into registers. Copy the values into a group of pseudos
798 which we will later copy into the appropriate hard registers.
800 Pseudos for each unaligned argument will be stored into the array
801 args[argnum].aligned_regs. The caller is responsible for deallocating
802 the aligned_regs array if it is nonzero. */
805 store_unaligned_arguments_into_pseudos (args
, num_actuals
)
806 struct arg_data
*args
;
811 for (i
= 0; i
< num_actuals
; i
++)
812 if (args
[i
].reg
!= 0 && ! args
[i
].pass_on_stack
813 && args
[i
].mode
== BLKmode
814 && (TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
))
815 < (unsigned int) MIN (BIGGEST_ALIGNMENT
, BITS_PER_WORD
)))
817 int bytes
= int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
818 int big_endian_correction
= 0;
820 args
[i
].n_aligned_regs
821 = args
[i
].partial
? args
[i
].partial
822 : (bytes
+ (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
824 args
[i
].aligned_regs
= (rtx
*) xmalloc (sizeof (rtx
)
825 * args
[i
].n_aligned_regs
);
827 /* Structures smaller than a word are aligned to the least
828 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
829 this means we must skip the empty high order bytes when
830 calculating the bit offset. */
831 if (BYTES_BIG_ENDIAN
&& bytes
< UNITS_PER_WORD
)
832 big_endian_correction
= (BITS_PER_WORD
- (bytes
* BITS_PER_UNIT
));
834 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
836 rtx reg
= gen_reg_rtx (word_mode
);
837 rtx word
= operand_subword_force (args
[i
].value
, j
, BLKmode
);
838 int bitsize
= MIN (bytes
* BITS_PER_UNIT
, BITS_PER_WORD
);
839 int bitalign
= TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
));
841 args
[i
].aligned_regs
[j
] = reg
;
843 /* There is no need to restrict this code to loading items
844 in TYPE_ALIGN sized hunks. The bitfield instructions can
845 load up entire word sized registers efficiently.
847 ??? This may not be needed anymore.
848 We use to emit a clobber here but that doesn't let later
849 passes optimize the instructions we emit. By storing 0 into
850 the register later passes know the first AND to zero out the
851 bitfield being set in the register is unnecessary. The store
852 of 0 will be deleted as will at least the first AND. */
854 emit_move_insn (reg
, const0_rtx
);
856 bytes
-= bitsize
/ BITS_PER_UNIT
;
857 store_bit_field (reg
, bitsize
, big_endian_correction
, word_mode
,
858 extract_bit_field (word
, bitsize
, 0, 1,
861 bitalign
/ BITS_PER_UNIT
,
863 bitalign
/ BITS_PER_UNIT
, BITS_PER_WORD
);
868 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
871 NUM_ACTUALS is the total number of parameters.
873 N_NAMED_ARGS is the total number of named arguments.
875 FNDECL is the tree code for the target of this call (if known)
877 ARGS_SO_FAR holds state needed by the target to know where to place
880 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
881 for arguments which are passed in registers.
883 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
884 and may be modified by this routine.
886 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
887 flags which may be modified by this routine. */
890 initialize_argument_information (num_actuals
, args
, args_size
, n_named_args
,
891 actparms
, fndecl
, args_so_far
,
892 reg_parm_stack_space
, old_stack_level
,
893 old_pending_adj
, must_preallocate
, is_const
)
894 int num_actuals ATTRIBUTE_UNUSED
;
895 struct arg_data
*args
;
896 struct args_size
*args_size
;
897 int n_named_args ATTRIBUTE_UNUSED
;
900 CUMULATIVE_ARGS
*args_so_far
;
901 int reg_parm_stack_space
;
902 rtx
*old_stack_level
;
903 int *old_pending_adj
;
904 int *must_preallocate
;
907 /* 1 if scanning parms front to back, -1 if scanning back to front. */
910 /* Count arg position in order args appear. */
913 struct args_size alignment_pad
;
917 args_size
->constant
= 0;
920 /* In this loop, we consider args in the order they are written.
921 We fill up ARGS from the front or from the back if necessary
922 so that in any case the first arg to be pushed ends up at the front. */
924 #ifdef PUSH_ARGS_REVERSED
925 i
= num_actuals
- 1, inc
= -1;
926 /* In this case, must reverse order of args
927 so that we compute and push the last arg first. */
932 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
933 for (p
= actparms
, argpos
= 0; p
; p
= TREE_CHAIN (p
), i
+= inc
, argpos
++)
935 tree type
= TREE_TYPE (TREE_VALUE (p
));
937 enum machine_mode mode
;
939 args
[i
].tree_value
= TREE_VALUE (p
);
941 /* Replace erroneous argument with constant zero. */
942 if (type
== error_mark_node
|| TYPE_SIZE (type
) == 0)
943 args
[i
].tree_value
= integer_zero_node
, type
= integer_type_node
;
945 /* If TYPE is a transparent union, pass things the way we would
946 pass the first field of the union. We have already verified that
947 the modes are the same. */
948 if (TYPE_TRANSPARENT_UNION (type
))
949 type
= TREE_TYPE (TYPE_FIELDS (type
));
951 /* Decide where to pass this arg.
953 args[i].reg is nonzero if all or part is passed in registers.
955 args[i].partial is nonzero if part but not all is passed in registers,
956 and the exact value says how many words are passed in registers.
958 args[i].pass_on_stack is nonzero if the argument must at least be
959 computed on the stack. It may then be loaded back into registers
960 if args[i].reg is nonzero.
962 These decisions are driven by the FUNCTION_... macros and must agree
963 with those made by function.c. */
965 /* See if this argument should be passed by invisible reference. */
966 if ((TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
967 && contains_placeholder_p (TYPE_SIZE (type
)))
968 || TREE_ADDRESSABLE (type
)
969 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
970 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far
, TYPE_MODE (type
),
971 type
, argpos
< n_named_args
)
975 /* If we're compiling a thunk, pass through invisible
976 references instead of making a copy. */
977 if (current_function_is_thunk
978 #ifdef FUNCTION_ARG_CALLEE_COPIES
979 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far
, TYPE_MODE (type
),
980 type
, argpos
< n_named_args
)
981 /* If it's in a register, we must make a copy of it too. */
982 /* ??? Is this a sufficient test? Is there a better one? */
983 && !(TREE_CODE (args
[i
].tree_value
) == VAR_DECL
984 && REG_P (DECL_RTL (args
[i
].tree_value
)))
985 && ! TREE_ADDRESSABLE (type
))
989 /* C++ uses a TARGET_EXPR to indicate that we want to make a
990 new object from the argument. If we are passing by
991 invisible reference, the callee will do that for us, so we
992 can strip off the TARGET_EXPR. This is not always safe,
993 but it is safe in the only case where this is a useful
994 optimization; namely, when the argument is a plain object.
995 In that case, the frontend is just asking the backend to
996 make a bitwise copy of the argument. */
998 if (TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
999 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
1000 (args
[i
].tree_value
, 1)))
1002 && ! REG_P (DECL_RTL (TREE_OPERAND (args
[i
].tree_value
, 1))))
1003 args
[i
].tree_value
= TREE_OPERAND (args
[i
].tree_value
, 1);
1005 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1006 build_pointer_type (type
),
1007 args
[i
].tree_value
);
1008 type
= build_pointer_type (type
);
1012 /* We make a copy of the object and pass the address to the
1013 function being called. */
1016 if (TYPE_SIZE (type
) == 0
1017 || TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
1018 || (flag_stack_check
&& ! STACK_CHECK_BUILTIN
1019 && (TREE_INT_CST_HIGH (TYPE_SIZE (type
)) != 0
1020 || (TREE_INT_CST_LOW (TYPE_SIZE (type
))
1021 > STACK_CHECK_MAX_VAR_SIZE
* BITS_PER_UNIT
))))
1023 /* This is a variable-sized object. Make space on the stack
1025 rtx size_rtx
= expr_size (TREE_VALUE (p
));
1027 if (*old_stack_level
== 0)
1029 emit_stack_save (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1030 *old_pending_adj
= pending_stack_adjust
;
1031 pending_stack_adjust
= 0;
1034 copy
= gen_rtx_MEM (BLKmode
,
1035 allocate_dynamic_stack_space (size_rtx
,
1037 TYPE_ALIGN (type
)));
1041 int size
= int_size_in_bytes (type
);
1042 copy
= assign_stack_temp (TYPE_MODE (type
), size
, 0);
1045 MEM_SET_IN_STRUCT_P (copy
, AGGREGATE_TYPE_P (type
));
1047 store_expr (args
[i
].tree_value
, copy
, 0);
1050 args
[i
].tree_value
= build1 (ADDR_EXPR
,
1051 build_pointer_type (type
),
1052 make_tree (type
, copy
));
1053 type
= build_pointer_type (type
);
1057 mode
= TYPE_MODE (type
);
1058 unsignedp
= TREE_UNSIGNED (type
);
1060 #ifdef PROMOTE_FUNCTION_ARGS
1061 mode
= promote_mode (type
, mode
, &unsignedp
, 1);
1064 args
[i
].unsignedp
= unsignedp
;
1065 args
[i
].mode
= mode
;
1066 args
[i
].reg
= FUNCTION_ARG (*args_so_far
, mode
, type
,
1067 argpos
< n_named_args
);
1068 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1071 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far
, mode
, type
,
1072 argpos
< n_named_args
);
1075 args
[i
].pass_on_stack
= MUST_PASS_IN_STACK (mode
, type
);
1077 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1078 it means that we are to pass this arg in the register(s) designated
1079 by the PARALLEL, but also to pass it in the stack. */
1080 if (args
[i
].reg
&& GET_CODE (args
[i
].reg
) == PARALLEL
1081 && XEXP (XVECEXP (args
[i
].reg
, 0, 0), 0) == 0)
1082 args
[i
].pass_on_stack
= 1;
1084 /* If this is an addressable type, we must preallocate the stack
1085 since we must evaluate the object into its final location.
1087 If this is to be passed in both registers and the stack, it is simpler
1089 if (TREE_ADDRESSABLE (type
)
1090 || (args
[i
].pass_on_stack
&& args
[i
].reg
!= 0))
1091 *must_preallocate
= 1;
1093 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1094 we cannot consider this function call constant. */
1095 if (TREE_ADDRESSABLE (type
))
1098 /* Compute the stack-size of this argument. */
1099 if (args
[i
].reg
== 0 || args
[i
].partial
!= 0
1100 || reg_parm_stack_space
> 0
1101 || args
[i
].pass_on_stack
)
1102 locate_and_pad_parm (mode
, type
,
1103 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1108 fndecl
, args_size
, &args
[i
].offset
,
1109 &args
[i
].size
, &alignment_pad
);
1111 #ifndef ARGS_GROW_DOWNWARD
1112 args
[i
].slot_offset
= *args_size
;
1115 args
[i
].alignment_pad
= alignment_pad
;
1117 /* If a part of the arg was put into registers,
1118 don't include that part in the amount pushed. */
1119 if (reg_parm_stack_space
== 0 && ! args
[i
].pass_on_stack
)
1120 args
[i
].size
.constant
-= ((args
[i
].partial
* UNITS_PER_WORD
)
1121 / (PARM_BOUNDARY
/ BITS_PER_UNIT
)
1122 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
1124 /* Update ARGS_SIZE, the total stack space for args so far. */
1126 args_size
->constant
+= args
[i
].size
.constant
;
1127 if (args
[i
].size
.var
)
1129 ADD_PARM_SIZE (*args_size
, args
[i
].size
.var
);
1132 /* Since the slot offset points to the bottom of the slot,
1133 we must record it after incrementing if the args grow down. */
1134 #ifdef ARGS_GROW_DOWNWARD
1135 args
[i
].slot_offset
= *args_size
;
1137 args
[i
].slot_offset
.constant
= -args_size
->constant
;
1140 SUB_PARM_SIZE (args
[i
].slot_offset
, args_size
->var
);
1144 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1145 have been used, etc. */
1147 FUNCTION_ARG_ADVANCE (*args_so_far
, TYPE_MODE (type
), type
,
1148 argpos
< n_named_args
);
1152 /* Update ARGS_SIZE to contain the total size for the argument block.
1153 Return the original constant component of the argument block's size.
1155 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1156 for arguments passed in registers. */
1159 compute_argument_block_size (reg_parm_stack_space
, args_size
)
1160 int reg_parm_stack_space
;
1161 struct args_size
*args_size
;
1163 int unadjusted_args_size
= args_size
->constant
;
1165 /* Compute the actual size of the argument block required. The variable
1166 and constant sizes must be combined, the size may have to be rounded,
1167 and there may be a minimum required size. */
1171 args_size
->var
= ARGS_SIZE_TREE (*args_size
);
1172 args_size
->constant
= 0;
1174 #ifdef PREFERRED_STACK_BOUNDARY
1175 if (PREFERRED_STACK_BOUNDARY
!= BITS_PER_UNIT
)
1176 args_size
->var
= round_up (args_size
->var
, STACK_BYTES
);
1179 if (reg_parm_stack_space
> 0)
1182 = size_binop (MAX_EXPR
, args_size
->var
,
1183 size_int (reg_parm_stack_space
));
1185 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1186 /* The area corresponding to register parameters is not to count in
1187 the size of the block we need. So make the adjustment. */
1189 = size_binop (MINUS_EXPR
, args_size
->var
,
1190 size_int (reg_parm_stack_space
));
1196 #ifdef PREFERRED_STACK_BOUNDARY
1197 args_size
->constant
= (((args_size
->constant
1198 + pending_stack_adjust
1200 / STACK_BYTES
* STACK_BYTES
)
1201 - pending_stack_adjust
);
1204 args_size
->constant
= MAX (args_size
->constant
,
1205 reg_parm_stack_space
);
1207 #ifdef MAYBE_REG_PARM_STACK_SPACE
1208 if (reg_parm_stack_space
== 0)
1209 args_size
->constant
= 0;
1212 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1213 args_size
->constant
-= reg_parm_stack_space
;
1216 return unadjusted_args_size
;
1219 /* Precompute parameters as needed for a function call.
1221 IS_CONST indicates the target function is a pure function.
1223 MUST_PREALLOCATE indicates that we must preallocate stack space for
1224 any stack arguments.
1226 NUM_ACTUALS is the number of arguments.
1228 ARGS is an array containing information for each argument; this routine
1229 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1231 ARGS_SIZE contains information about the size of the arg list. */
1234 precompute_arguments (is_const
, must_preallocate
, num_actuals
, args
, args_size
)
1236 int must_preallocate
;
1238 struct arg_data
*args
;
1239 struct args_size
*args_size
;
1243 /* If this function call is cse'able, precompute all the parameters.
1244 Note that if the parameter is constructed into a temporary, this will
1245 cause an additional copy because the parameter will be constructed
1246 into a temporary location and then copied into the outgoing arguments.
1247 If a parameter contains a call to alloca and this function uses the
1248 stack, precompute the parameter. */
1250 /* If we preallocated the stack space, and some arguments must be passed
1251 on the stack, then we must precompute any parameter which contains a
1252 function call which will store arguments on the stack.
1253 Otherwise, evaluating the parameter may clobber previous parameters
1254 which have already been stored into the stack. */
1256 for (i
= 0; i
< num_actuals
; i
++)
1258 || ((args_size
->var
!= 0 || args_size
->constant
!= 0)
1259 && calls_function (args
[i
].tree_value
, 1))
1260 || (must_preallocate
1261 && (args_size
->var
!= 0 || args_size
->constant
!= 0)
1262 && calls_function (args
[i
].tree_value
, 0)))
1264 /* If this is an addressable type, we cannot pre-evaluate it. */
1265 if (TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
)))
1270 args
[i
].initial_value
= args
[i
].value
1271 = expand_expr (args
[i
].tree_value
, NULL_RTX
, VOIDmode
, 0);
1273 preserve_temp_slots (args
[i
].value
);
1276 /* ANSI doesn't require a sequence point here,
1277 but PCC has one, so this will avoid some problems. */
1280 args
[i
].initial_value
= args
[i
].value
1281 = protect_from_queue (args
[i
].initial_value
, 0);
1283 if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) != args
[i
].mode
)
1285 = convert_modes (args
[i
].mode
,
1286 TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)),
1287 args
[i
].value
, args
[i
].unsignedp
);
1291 /* Given the current state of MUST_PREALLOCATE and information about
1292 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1293 compute and return the final value for MUST_PREALLOCATE. */
1296 finalize_must_preallocate (must_preallocate
, num_actuals
, args
, args_size
)
1297 int must_preallocate
;
1299 struct arg_data
*args
;
1300 struct args_size
*args_size
;
1302 /* See if we have or want to preallocate stack space.
1304 If we would have to push a partially-in-regs parm
1305 before other stack parms, preallocate stack space instead.
1307 If the size of some parm is not a multiple of the required stack
1308 alignment, we must preallocate.
1310 If the total size of arguments that would otherwise create a copy in
1311 a temporary (such as a CALL) is more than half the total argument list
1312 size, preallocation is faster.
1314 Another reason to preallocate is if we have a machine (like the m88k)
1315 where stack alignment is required to be maintained between every
1316 pair of insns, not just when the call is made. However, we assume here
1317 that such machines either do not have push insns (and hence preallocation
1318 would occur anyway) or the problem is taken care of with
1321 if (! must_preallocate
)
1323 int partial_seen
= 0;
1324 int copy_to_evaluate_size
= 0;
1327 for (i
= 0; i
< num_actuals
&& ! must_preallocate
; i
++)
1329 if (args
[i
].partial
> 0 && ! args
[i
].pass_on_stack
)
1331 else if (partial_seen
&& args
[i
].reg
== 0)
1332 must_preallocate
= 1;
1334 if (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
1335 && (TREE_CODE (args
[i
].tree_value
) == CALL_EXPR
1336 || TREE_CODE (args
[i
].tree_value
) == TARGET_EXPR
1337 || TREE_CODE (args
[i
].tree_value
) == COND_EXPR
1338 || TREE_ADDRESSABLE (TREE_TYPE (args
[i
].tree_value
))))
1339 copy_to_evaluate_size
1340 += int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
));
1343 if (copy_to_evaluate_size
* 2 >= args_size
->constant
1344 && args_size
->constant
> 0)
1345 must_preallocate
= 1;
1347 return must_preallocate
;
1350 /* If we preallocated stack space, compute the address of each argument
1351 and store it into the ARGS array.
1353 We need not ensure it is a valid memory address here; it will be
1354 validized when it is used.
1356 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1359 compute_argument_addresses (args
, argblock
, num_actuals
)
1360 struct arg_data
*args
;
1366 rtx arg_reg
= argblock
;
1367 int i
, arg_offset
= 0;
1369 if (GET_CODE (argblock
) == PLUS
)
1370 arg_reg
= XEXP (argblock
, 0), arg_offset
= INTVAL (XEXP (argblock
, 1));
1372 for (i
= 0; i
< num_actuals
; i
++)
1374 rtx offset
= ARGS_SIZE_RTX (args
[i
].offset
);
1375 rtx slot_offset
= ARGS_SIZE_RTX (args
[i
].slot_offset
);
1378 /* Skip this parm if it will not be passed on the stack. */
1379 if (! args
[i
].pass_on_stack
&& args
[i
].reg
!= 0)
1382 if (GET_CODE (offset
) == CONST_INT
)
1383 addr
= plus_constant (arg_reg
, INTVAL (offset
));
1385 addr
= gen_rtx_PLUS (Pmode
, arg_reg
, offset
);
1387 addr
= plus_constant (addr
, arg_offset
);
1388 args
[i
].stack
= gen_rtx_MEM (args
[i
].mode
, addr
);
1391 AGGREGATE_TYPE_P (TREE_TYPE (args
[i
].tree_value
)));
1393 if (GET_CODE (slot_offset
) == CONST_INT
)
1394 addr
= plus_constant (arg_reg
, INTVAL (slot_offset
));
1396 addr
= gen_rtx_PLUS (Pmode
, arg_reg
, slot_offset
);
1398 addr
= plus_constant (addr
, arg_offset
);
1399 args
[i
].stack_slot
= gen_rtx_MEM (args
[i
].mode
, addr
);
1404 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1405 in a call instruction.
1407 FNDECL is the tree node for the target function. For an indirect call
1408 FNDECL will be NULL_TREE.
1410 EXP is the CALL_EXPR for this call. */
1413 rtx_for_function_call (fndecl
, exp
)
1419 /* Get the function to call, in the form of RTL. */
1422 /* If this is the first use of the function, see if we need to
1423 make an external definition for it. */
1424 if (! TREE_USED (fndecl
))
1426 assemble_external (fndecl
);
1427 TREE_USED (fndecl
) = 1;
1430 /* Get a SYMBOL_REF rtx for the function address. */
1431 funexp
= XEXP (DECL_RTL (fndecl
), 0);
1434 /* Generate an rtx (probably a pseudo-register) for the address. */
1439 expand_expr (TREE_OPERAND (exp
, 0), NULL_RTX
, VOIDmode
, 0);
1440 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1442 /* Check the function is executable. */
1443 if (current_function_check_memory_usage
)
1445 #ifdef POINTERS_EXTEND_UNSIGNED
1446 /* It might be OK to convert funexp in place, but there's
1447 a lot going on between here and when it happens naturally
1448 that this seems safer. */
1449 funaddr
= convert_memory_address (Pmode
, funexp
);
1451 emit_library_call (chkr_check_exec_libfunc
, 1,
1460 /* Do the register loads required for any wholly-register parms or any
1461 parms which are passed both on the stack and in a register. Their
1462 expressions were already evaluated.
1464 Mark all register-parms as living through the call, putting these USE
1465 insns in the CALL_INSN_FUNCTION_USAGE field. */
1468 load_register_parameters (args
, num_actuals
, call_fusage
)
1469 struct arg_data
*args
;
1475 #ifdef LOAD_ARGS_REVERSED
1476 for (i
= num_actuals
- 1; i
>= 0; i
--)
1478 for (i
= 0; i
< num_actuals
; i
++)
1481 rtx reg
= args
[i
].reg
;
1482 int partial
= args
[i
].partial
;
1487 /* Set to non-negative if must move a word at a time, even if just
1488 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1489 we just use a normal move insn. This value can be zero if the
1490 argument is a zero size structure with no fields. */
1491 nregs
= (partial
? partial
1492 : (TYPE_MODE (TREE_TYPE (args
[i
].tree_value
)) == BLKmode
1493 ? ((int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
))
1494 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
1497 /* Handle calls that pass values in multiple non-contiguous
1498 locations. The Irix 6 ABI has examples of this. */
1500 if (GET_CODE (reg
) == PARALLEL
)
1502 emit_group_load (reg
, args
[i
].value
,
1503 int_size_in_bytes (TREE_TYPE (args
[i
].tree_value
)),
1504 (TYPE_ALIGN (TREE_TYPE (args
[i
].tree_value
))
1508 /* If simple case, just do move. If normal partial, store_one_arg
1509 has already loaded the register for us. In all other cases,
1510 load the register(s) from memory. */
1512 else if (nregs
== -1)
1513 emit_move_insn (reg
, args
[i
].value
);
1515 /* If we have pre-computed the values to put in the registers in
1516 the case of non-aligned structures, copy them in now. */
1518 else if (args
[i
].n_aligned_regs
!= 0)
1519 for (j
= 0; j
< args
[i
].n_aligned_regs
; j
++)
1520 emit_move_insn (gen_rtx_REG (word_mode
, REGNO (reg
) + j
),
1521 args
[i
].aligned_regs
[j
]);
1523 else if (partial
== 0 || args
[i
].pass_on_stack
)
1524 move_block_to_reg (REGNO (reg
),
1525 validize_mem (args
[i
].value
), nregs
,
1528 /* Handle calls that pass values in multiple non-contiguous
1529 locations. The Irix 6 ABI has examples of this. */
1530 if (GET_CODE (reg
) == PARALLEL
)
1531 use_group_regs (call_fusage
, reg
);
1532 else if (nregs
== -1)
1533 use_reg (call_fusage
, reg
);
1535 use_regs (call_fusage
, REGNO (reg
), nregs
== 0 ? 1 : nregs
);
1540 /* Generate all the code for a function call
1541 and return an rtx for its value.
1542 Store the value in TARGET (specified as an rtx) if convenient.
1543 If the value is stored in TARGET then TARGET is returned.
1544 If IGNORE is nonzero, then we ignore the value of the function call. */
1547 expand_call (exp
, target
, ignore
)
1552 /* List of actual parameters. */
1553 tree actparms
= TREE_OPERAND (exp
, 1);
1554 /* RTX for the function to be called. */
1556 /* Data type of the function. */
1558 /* Declaration of the function being called,
1559 or 0 if the function is computed (not known by name). */
1563 /* Register in which non-BLKmode value will be returned,
1564 or 0 if no value or if value is BLKmode. */
1566 /* Address where we should return a BLKmode value;
1567 0 if value not BLKmode. */
1568 rtx structure_value_addr
= 0;
1569 /* Nonzero if that address is being passed by treating it as
1570 an extra, implicit first parameter. Otherwise,
1571 it is passed by being copied directly into struct_value_rtx. */
1572 int structure_value_addr_parm
= 0;
1573 /* Size of aggregate value wanted, or zero if none wanted
1574 or if we are using the non-reentrant PCC calling convention
1575 or expecting the value in registers. */
1576 HOST_WIDE_INT struct_value_size
= 0;
1577 /* Nonzero if called function returns an aggregate in memory PCC style,
1578 by returning the address of where to find it. */
1579 int pcc_struct_value
= 0;
1581 /* Number of actual parameters in this call, including struct value addr. */
1583 /* Number of named args. Args after this are anonymous ones
1584 and they must all go on the stack. */
1587 /* Vector of information about each argument.
1588 Arguments are numbered in the order they will be pushed,
1589 not the order they are written. */
1590 struct arg_data
*args
;
1592 /* Total size in bytes of all the stack-parms scanned so far. */
1593 struct args_size args_size
;
1594 /* Size of arguments before any adjustments (such as rounding). */
1595 int unadjusted_args_size
;
1596 /* Data on reg parms scanned so far. */
1597 CUMULATIVE_ARGS args_so_far
;
1598 /* Nonzero if a reg parm has been scanned. */
1600 /* Nonzero if this is an indirect function call. */
1602 /* Nonzero if we must avoid push-insns in the args for this call.
1603 If stack space is allocated for register parameters, but not by the
1604 caller, then it is preallocated in the fixed part of the stack frame.
1605 So the entire argument block must then be preallocated (i.e., we
1606 ignore PUSH_ROUNDING in that case). */
1608 #ifdef PUSH_ROUNDING
1609 int must_preallocate
= 0;
1611 int must_preallocate
= 1;
1614 /* Size of the stack reserved for parameter registers. */
1615 int reg_parm_stack_space
= 0;
1617 /* Address of space preallocated for stack parms
1618 (on machines that lack push insns), or 0 if space not preallocated. */
1621 /* Nonzero if it is plausible that this is a call to alloca. */
1623 /* Nonzero if this is a call to malloc or a related function. */
1625 /* Nonzero if this is a call to setjmp or a related function. */
1627 /* Nonzero if this is a call to `longjmp'. */
1629 /* Nonzero if this is a call to an inline function. */
1630 int is_integrable
= 0;
1631 /* Nonzero if this is a call to a `const' function.
1632 Note that only explicitly named functions are handled as `const' here. */
1634 /* Nonzero if this is a call to a `volatile' function. */
1635 int is_volatile
= 0;
1636 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1637 /* Define the boundary of the register parm stack space that needs to be
1639 int low_to_save
= -1, high_to_save
;
1640 rtx save_area
= 0; /* Place that it is saved */
1643 #ifdef ACCUMULATE_OUTGOING_ARGS
1644 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
1645 char *initial_stack_usage_map
= stack_usage_map
;
1646 int old_stack_arg_under_construction
;
1649 rtx old_stack_level
= 0;
1650 int old_pending_adj
= 0;
1651 int old_inhibit_defer_pop
= inhibit_defer_pop
;
1652 rtx call_fusage
= 0;
1656 /* The value of the function call can be put in a hard register. But
1657 if -fcheck-memory-usage, code which invokes functions (and thus
1658 damages some hard registers) can be inserted before using the value.
1659 So, target is always a pseudo-register in that case. */
1660 if (current_function_check_memory_usage
)
1663 /* See if we can find a DECL-node for the actual function.
1664 As a result, decide whether this is a call to an integrable function. */
1666 p
= TREE_OPERAND (exp
, 0);
1667 if (TREE_CODE (p
) == ADDR_EXPR
)
1669 fndecl
= TREE_OPERAND (p
, 0);
1670 if (TREE_CODE (fndecl
) != FUNCTION_DECL
)
1675 && fndecl
!= current_function_decl
1676 && DECL_INLINE (fndecl
)
1677 && DECL_SAVED_INSNS (fndecl
)
1678 && DECL_SAVED_INSNS (fndecl
)->inlinable
)
1680 else if (! TREE_ADDRESSABLE (fndecl
))
1682 /* In case this function later becomes inlinable,
1683 record that there was already a non-inline call to it.
1685 Use abstraction instead of setting TREE_ADDRESSABLE
1687 if (DECL_INLINE (fndecl
) && warn_inline
&& !flag_no_inline
1690 warning_with_decl (fndecl
, "can't inline call to `%s'");
1691 warning ("called from here");
1693 mark_addressable (fndecl
);
1696 if (TREE_READONLY (fndecl
) && ! TREE_THIS_VOLATILE (fndecl
)
1697 && TYPE_MODE (TREE_TYPE (exp
)) != VOIDmode
)
1700 if (TREE_THIS_VOLATILE (fndecl
))
1705 /* If we don't have specific function to call, see if we have a
1706 constant or `noreturn' function from the type. */
1709 is_const
= TREE_READONLY (TREE_TYPE (TREE_TYPE (p
)));
1710 is_volatile
= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p
)));
1713 #ifdef REG_PARM_STACK_SPACE
1714 #ifdef MAYBE_REG_PARM_STACK_SPACE
1715 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
1717 reg_parm_stack_space
= REG_PARM_STACK_SPACE (fndecl
);
1721 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1722 if (reg_parm_stack_space
> 0)
1723 must_preallocate
= 1;
1726 /* Warn if this value is an aggregate type,
1727 regardless of which calling convention we are using for it. */
1728 if (warn_aggregate_return
&& AGGREGATE_TYPE_P (TREE_TYPE (exp
)))
1729 warning ("function call has aggregate value");
1731 /* Set up a place to return a structure. */
1733 /* Cater to broken compilers. */
1734 if (aggregate_value_p (exp
))
1736 /* This call returns a big structure. */
1739 #ifdef PCC_STATIC_STRUCT_RETURN
1741 pcc_struct_value
= 1;
1742 /* Easier than making that case work right. */
1745 /* In case this is a static function, note that it has been
1747 if (! TREE_ADDRESSABLE (fndecl
))
1748 mark_addressable (fndecl
);
1752 #else /* not PCC_STATIC_STRUCT_RETURN */
1754 struct_value_size
= int_size_in_bytes (TREE_TYPE (exp
));
1756 if (target
&& GET_CODE (target
) == MEM
)
1757 structure_value_addr
= XEXP (target
, 0);
1760 /* Assign a temporary to hold the value. */
1763 /* For variable-sized objects, we must be called with a target
1764 specified. If we were to allocate space on the stack here,
1765 we would have no way of knowing when to free it. */
1767 if (struct_value_size
< 0)
1770 /* This DECL is just something to feed to mark_addressable;
1771 it doesn't get pushed. */
1772 d
= build_decl (VAR_DECL
, NULL_TREE
, TREE_TYPE (exp
));
1773 DECL_RTL (d
) = assign_temp (TREE_TYPE (exp
), 1, 0, 1);
1774 mark_addressable (d
);
1775 mark_temp_addr_taken (DECL_RTL (d
));
1776 structure_value_addr
= XEXP (DECL_RTL (d
), 0);
1781 #endif /* not PCC_STATIC_STRUCT_RETURN */
1784 /* If called function is inline, try to integrate it. */
1789 #ifdef ACCUMULATE_OUTGOING_ARGS
1790 rtx before_call
= get_last_insn ();
1793 temp
= expand_inline_function (fndecl
, actparms
, target
,
1794 ignore
, TREE_TYPE (exp
),
1795 structure_value_addr
);
1797 /* If inlining succeeded, return. */
1798 if (temp
!= (rtx
) (HOST_WIDE_INT
) -1)
1800 #ifdef ACCUMULATE_OUTGOING_ARGS
1801 /* If the outgoing argument list must be preserved, push
1802 the stack before executing the inlined function if it
1805 for (i
= reg_parm_stack_space
- 1; i
>= 0; i
--)
1806 if (i
< highest_outgoing_arg_in_use
&& stack_usage_map
[i
] != 0)
1809 if (stack_arg_under_construction
|| i
>= 0)
1812 = before_call
? NEXT_INSN (before_call
) : get_insns ();
1813 rtx insn
= NULL_RTX
, seq
;
1815 /* Look for a call in the inline function code.
1816 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1817 nonzero then there is a call and it is not necessary
1818 to scan the insns. */
1820 if (DECL_SAVED_INSNS (fndecl
)->outgoing_args_size
== 0)
1821 for (insn
= first_insn
; insn
; insn
= NEXT_INSN (insn
))
1822 if (GET_CODE (insn
) == CALL_INSN
)
1827 /* Reserve enough stack space so that the largest
1828 argument list of any function call in the inline
1829 function does not overlap the argument list being
1830 evaluated. This is usually an overestimate because
1831 allocate_dynamic_stack_space reserves space for an
1832 outgoing argument list in addition to the requested
1833 space, but there is no way to ask for stack space such
1834 that an argument list of a certain length can be
1837 Add the stack space reserved for register arguments, if
1838 any, in the inline function. What is really needed is the
1839 largest value of reg_parm_stack_space in the inline
1840 function, but that is not available. Using the current
1841 value of reg_parm_stack_space is wrong, but gives
1842 correct results on all supported machines. */
1844 int adjust
= (DECL_SAVED_INSNS (fndecl
)->outgoing_args_size
1845 + reg_parm_stack_space
);
1848 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
1849 allocate_dynamic_stack_space (GEN_INT (adjust
),
1850 NULL_RTX
, BITS_PER_UNIT
);
1853 emit_insns_before (seq
, first_insn
);
1854 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
1859 /* If the result is equivalent to TARGET, return TARGET to simplify
1860 checks in store_expr. They can be equivalent but not equal in the
1861 case of a function that returns BLKmode. */
1862 if (temp
!= target
&& rtx_equal_p (temp
, target
))
1867 /* If inlining failed, mark FNDECL as needing to be compiled
1868 separately after all. If function was declared inline,
1870 if (DECL_INLINE (fndecl
) && warn_inline
&& !flag_no_inline
1871 && optimize
> 0 && ! TREE_ADDRESSABLE (fndecl
))
1873 warning_with_decl (fndecl
, "inlining failed in call to `%s'");
1874 warning ("called from here");
1876 mark_addressable (fndecl
);
1879 function_call_count
++;
1881 if (fndecl
&& DECL_NAME (fndecl
))
1882 name
= IDENTIFIER_POINTER (DECL_NAME (fndecl
));
1884 /* See if this is a call to a function that can return more than once
1885 or a call to longjmp or malloc. */
1886 special_function_p (fndecl
, &returns_twice
, &is_longjmp
,
1887 &is_malloc
, &may_be_alloca
);
1890 current_function_calls_alloca
= 1;
1892 /* Operand 0 is a pointer-to-function; get the type of the function. */
1893 funtype
= TREE_TYPE (TREE_OPERAND (exp
, 0));
1894 if (! POINTER_TYPE_P (funtype
))
1896 funtype
= TREE_TYPE (funtype
);
1898 /* When calling a const function, we must pop the stack args right away,
1899 so that the pop is deleted or moved with the call. */
1903 /* Don't let pending stack adjusts add up to too much.
1904 Also, do all pending adjustments now
1905 if there is any chance this might be a call to alloca. */
1907 if (pending_stack_adjust
>= 32
1908 || (pending_stack_adjust
> 0 && may_be_alloca
))
1909 do_pending_stack_adjust ();
1911 /* Push the temporary stack slot level so that we can free any temporaries
1915 /* Start updating where the next arg would go.
1917 On some machines (such as the PA) indirect calls have a different
1918 calling convention than normal calls. The last argument in
1919 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1921 INIT_CUMULATIVE_ARGS (args_so_far
, funtype
, NULL_RTX
, (fndecl
== 0));
1923 /* If struct_value_rtx is 0, it means pass the address
1924 as if it were an extra parameter. */
1925 if (structure_value_addr
&& struct_value_rtx
== 0)
1927 /* If structure_value_addr is a REG other than
1928 virtual_outgoing_args_rtx, we can use always use it. If it
1929 is not a REG, we must always copy it into a register.
1930 If it is virtual_outgoing_args_rtx, we must copy it to another
1931 register in some cases. */
1932 rtx temp
= (GET_CODE (structure_value_addr
) != REG
1933 #ifdef ACCUMULATE_OUTGOING_ARGS
1934 || (stack_arg_under_construction
1935 && structure_value_addr
== virtual_outgoing_args_rtx
)
1937 ? copy_addr_to_reg (structure_value_addr
)
1938 : structure_value_addr
);
1941 = tree_cons (error_mark_node
,
1942 make_tree (build_pointer_type (TREE_TYPE (funtype
)),
1945 structure_value_addr_parm
= 1;
1948 /* Count the arguments and set NUM_ACTUALS. */
1949 for (p
= actparms
, i
= 0; p
; p
= TREE_CHAIN (p
)) i
++;
1952 /* Compute number of named args.
1953 Normally, don't include the last named arg if anonymous args follow.
1954 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1955 (If no anonymous args follow, the result of list_length is actually
1956 one too large. This is harmless.)
1958 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1959 zero, this machine will be able to place unnamed args that were passed in
1960 registers into the stack. So treat all args as named. This allows the
1961 insns emitting for a specific argument list to be independent of the
1962 function declaration.
1964 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1965 way to pass unnamed args in registers, so we must force them into
1968 if ((STRICT_ARGUMENT_NAMING
1969 || ! PRETEND_OUTGOING_VARARGS_NAMED
)
1970 && TYPE_ARG_TYPES (funtype
) != 0)
1972 = (list_length (TYPE_ARG_TYPES (funtype
))
1973 /* Don't include the last named arg. */
1974 - (STRICT_ARGUMENT_NAMING
? 0 : 1)
1975 /* Count the struct value address, if it is passed as a parm. */
1976 + structure_value_addr_parm
);
1978 /* If we know nothing, treat all args as named. */
1979 n_named_args
= num_actuals
;
1981 /* Make a vector to hold all the information about each arg. */
1982 args
= (struct arg_data
*) alloca (num_actuals
* sizeof (struct arg_data
));
1983 bzero ((char *) args
, num_actuals
* sizeof (struct arg_data
));
1985 /* Build up entries inthe ARGS array, compute the size of the arguments
1986 into ARGS_SIZE, etc. */
1987 initialize_argument_information (num_actuals
, args
, &args_size
, n_named_args
,
1988 actparms
, fndecl
, &args_so_far
,
1989 reg_parm_stack_space
, &old_stack_level
,
1990 &old_pending_adj
, &must_preallocate
,
1993 #ifdef FINAL_REG_PARM_STACK_SPACE
1994 reg_parm_stack_space
= FINAL_REG_PARM_STACK_SPACE (args_size
.constant
,
2000 /* If this function requires a variable-sized argument list, don't try to
2001 make a cse'able block for this call. We may be able to do this
2002 eventually, but it is too complicated to keep track of what insns go
2003 in the cse'able block and which don't. */
2006 must_preallocate
= 1;
2009 /* Compute the actual size of the argument block required. The variable
2010 and constant sizes must be combined, the size may have to be rounded,
2011 and there may be a minimum required size. */
2012 unadjusted_args_size
2013 = compute_argument_block_size (reg_parm_stack_space
, &args_size
);
2015 /* Now make final decision about preallocating stack space. */
2016 must_preallocate
= finalize_must_preallocate (must_preallocate
,
2017 num_actuals
, args
, &args_size
);
2019 /* If the structure value address will reference the stack pointer, we must
2020 stabilize it. We don't need to do this if we know that we are not going
2021 to adjust the stack pointer in processing this call. */
2023 if (structure_value_addr
2024 && (reg_mentioned_p (virtual_stack_dynamic_rtx
, structure_value_addr
)
2025 || reg_mentioned_p (virtual_outgoing_args_rtx
, structure_value_addr
))
2027 #ifndef ACCUMULATE_OUTGOING_ARGS
2028 || args_size
.constant
2031 structure_value_addr
= copy_to_reg (structure_value_addr
);
2033 /* Precompute any arguments as needed. */
2034 precompute_arguments (is_const
, must_preallocate
, num_actuals
,
2037 /* Now we are about to start emitting insns that can be deleted
2038 if a libcall is deleted. */
2039 if (is_const
|| is_malloc
)
2042 /* If we have no actual push instructions, or shouldn't use them,
2043 make space for all args right now. */
2045 if (args_size
.var
!= 0)
2047 if (old_stack_level
== 0)
2049 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
2050 old_pending_adj
= pending_stack_adjust
;
2051 pending_stack_adjust
= 0;
2052 #ifdef ACCUMULATE_OUTGOING_ARGS
2053 /* stack_arg_under_construction says whether a stack arg is
2054 being constructed at the old stack level. Pushing the stack
2055 gets a clean outgoing argument block. */
2056 old_stack_arg_under_construction
= stack_arg_under_construction
;
2057 stack_arg_under_construction
= 0;
2060 argblock
= push_block (ARGS_SIZE_RTX (args_size
), 0, 0);
2064 /* Note that we must go through the motions of allocating an argument
2065 block even if the size is zero because we may be storing args
2066 in the area reserved for register arguments, which may be part of
2069 int needed
= args_size
.constant
;
2071 /* Store the maximum argument space used. It will be pushed by
2072 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2075 if (needed
> current_function_outgoing_args_size
)
2076 current_function_outgoing_args_size
= needed
;
2078 if (must_preallocate
)
2080 #ifdef ACCUMULATE_OUTGOING_ARGS
2081 /* Since the stack pointer will never be pushed, it is possible for
2082 the evaluation of a parm to clobber something we have already
2083 written to the stack. Since most function calls on RISC machines
2084 do not use the stack, this is uncommon, but must work correctly.
2086 Therefore, we save any area of the stack that was already written
2087 and that we are using. Here we set up to do this by making a new
2088 stack usage map from the old one. The actual save will be done
2091 Another approach might be to try to reorder the argument
2092 evaluations to avoid this conflicting stack usage. */
2094 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2095 /* Since we will be writing into the entire argument area, the
2096 map must be allocated for its entire size, not just the part that
2097 is the responsibility of the caller. */
2098 needed
+= reg_parm_stack_space
;
2101 #ifdef ARGS_GROW_DOWNWARD
2102 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2105 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2108 stack_usage_map
= (char *) alloca (highest_outgoing_arg_in_use
);
2110 if (initial_highest_arg_in_use
)
2111 bcopy (initial_stack_usage_map
, stack_usage_map
,
2112 initial_highest_arg_in_use
);
2114 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
2115 bzero (&stack_usage_map
[initial_highest_arg_in_use
],
2116 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
2119 /* The address of the outgoing argument list must not be copied to a
2120 register here, because argblock would be left pointing to the
2121 wrong place after the call to allocate_dynamic_stack_space below.
2124 argblock
= virtual_outgoing_args_rtx
;
2126 #else /* not ACCUMULATE_OUTGOING_ARGS */
2127 if (inhibit_defer_pop
== 0)
2129 /* Try to reuse some or all of the pending_stack_adjust
2130 to get this space. Maybe we can avoid any pushing. */
2131 if (needed
> pending_stack_adjust
)
2133 needed
-= pending_stack_adjust
;
2134 pending_stack_adjust
= 0;
2138 pending_stack_adjust
-= needed
;
2142 /* Special case this because overhead of `push_block' in this
2143 case is non-trivial. */
2145 argblock
= virtual_outgoing_args_rtx
;
2147 argblock
= push_block (GEN_INT (needed
), 0, 0);
2149 /* We only really need to call `copy_to_reg' in the case where push
2150 insns are going to be used to pass ARGBLOCK to a function
2151 call in ARGS. In that case, the stack pointer changes value
2152 from the allocation point to the call point, and hence
2153 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2154 But might as well always do it. */
2155 argblock
= copy_to_reg (argblock
);
2156 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2160 #ifdef ACCUMULATE_OUTGOING_ARGS
2161 /* The save/restore code in store_one_arg handles all cases except one:
2162 a constructor call (including a C function returning a BLKmode struct)
2163 to initialize an argument. */
2164 if (stack_arg_under_construction
)
2166 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2167 rtx push_size
= GEN_INT (reg_parm_stack_space
+ args_size
.constant
);
2169 rtx push_size
= GEN_INT (args_size
.constant
);
2171 if (old_stack_level
== 0)
2173 emit_stack_save (SAVE_BLOCK
, &old_stack_level
, NULL_RTX
);
2174 old_pending_adj
= pending_stack_adjust
;
2175 pending_stack_adjust
= 0;
2176 /* stack_arg_under_construction says whether a stack arg is
2177 being constructed at the old stack level. Pushing the stack
2178 gets a clean outgoing argument block. */
2179 old_stack_arg_under_construction
= stack_arg_under_construction
;
2180 stack_arg_under_construction
= 0;
2181 /* Make a new map for the new argument list. */
2182 stack_usage_map
= (char *)alloca (highest_outgoing_arg_in_use
);
2183 bzero (stack_usage_map
, highest_outgoing_arg_in_use
);
2184 highest_outgoing_arg_in_use
= 0;
2186 allocate_dynamic_stack_space (push_size
, NULL_RTX
, BITS_PER_UNIT
);
2188 /* If argument evaluation might modify the stack pointer, copy the
2189 address of the argument list to a register. */
2190 for (i
= 0; i
< num_actuals
; i
++)
2191 if (args
[i
].pass_on_stack
)
2193 argblock
= copy_addr_to_reg (argblock
);
2198 compute_argument_addresses (args
, argblock
, num_actuals
);
2200 #ifdef PUSH_ARGS_REVERSED
2201 #ifdef PREFERRED_STACK_BOUNDARY
2202 /* If we push args individually in reverse order, perform stack alignment
2203 before the first push (the last arg). */
2205 anti_adjust_stack (GEN_INT (args_size
.constant
- unadjusted_args_size
));
2209 /* Don't try to defer pops if preallocating, not even from the first arg,
2210 since ARGBLOCK probably refers to the SP. */
2214 funexp
= rtx_for_function_call (fndecl
, exp
);
2216 /* Figure out the register where the value, if any, will come back. */
2218 if (TYPE_MODE (TREE_TYPE (exp
)) != VOIDmode
2219 && ! structure_value_addr
)
2221 if (pcc_struct_value
)
2222 valreg
= hard_function_value (build_pointer_type (TREE_TYPE (exp
)),
2225 valreg
= hard_function_value (TREE_TYPE (exp
), fndecl
, 0);
2228 /* Precompute all register parameters. It isn't safe to compute anything
2229 once we have started filling any specific hard regs. */
2230 precompute_register_parameters (num_actuals
, args
, ®_parm_seen
);
2232 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2234 /* Save the fixed argument area if it's part of the caller's frame and
2235 is clobbered by argument setup for this call. */
2236 save_area
= save_fixed_argument_area (reg_parm_stack_space
, argblock
,
2237 &low_to_save
, &high_to_save
);
2241 /* Now store (and compute if necessary) all non-register parms.
2242 These come before register parms, since they can require block-moves,
2243 which could clobber the registers used for register parms.
2244 Parms which have partial registers are not stored here,
2245 but we do preallocate space here if they want that. */
2247 for (i
= 0; i
< num_actuals
; i
++)
2248 if (args
[i
].reg
== 0 || args
[i
].pass_on_stack
)
2249 store_one_arg (&args
[i
], argblock
, may_be_alloca
,
2250 args_size
.var
!= 0, reg_parm_stack_space
);
2252 /* If we have a parm that is passed in registers but not in memory
2253 and whose alignment does not permit a direct copy into registers,
2254 make a group of pseudos that correspond to each register that we
2256 if (STRICT_ALIGNMENT
)
2257 store_unaligned_arguments_into_pseudos (args
, num_actuals
);
2259 /* Now store any partially-in-registers parm.
2260 This is the last place a block-move can happen. */
2262 for (i
= 0; i
< num_actuals
; i
++)
2263 if (args
[i
].partial
!= 0 && ! args
[i
].pass_on_stack
)
2264 store_one_arg (&args
[i
], argblock
, may_be_alloca
,
2265 args_size
.var
!= 0, reg_parm_stack_space
);
2267 #ifndef PUSH_ARGS_REVERSED
2268 #ifdef PREFERRED_STACK_BOUNDARY
2269 /* If we pushed args in forward order, perform stack alignment
2270 after pushing the last arg. */
2272 anti_adjust_stack (GEN_INT (args_size
.constant
- unadjusted_args_size
));
2276 /* If register arguments require space on the stack and stack space
2277 was not preallocated, allocate stack space here for arguments
2278 passed in registers. */
2279 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2280 if (must_preallocate
== 0 && reg_parm_stack_space
> 0)
2281 anti_adjust_stack (GEN_INT (reg_parm_stack_space
));
2284 /* Pass the function the address in which to return a structure value. */
2285 if (structure_value_addr
&& ! structure_value_addr_parm
)
2287 emit_move_insn (struct_value_rtx
,
2289 force_operand (structure_value_addr
,
2292 /* Mark the memory for the aggregate as write-only. */
2293 if (current_function_check_memory_usage
)
2294 emit_library_call (chkr_set_right_libfunc
, 1,
2296 structure_value_addr
, Pmode
,
2297 GEN_INT (struct_value_size
), TYPE_MODE (sizetype
),
2298 GEN_INT (MEMORY_USE_WO
),
2299 TYPE_MODE (integer_type_node
));
2301 if (GET_CODE (struct_value_rtx
) == REG
)
2302 use_reg (&call_fusage
, struct_value_rtx
);
2305 funexp
= prepare_call_address (funexp
, fndecl
, &call_fusage
, reg_parm_seen
);
2307 load_register_parameters (args
, num_actuals
, &call_fusage
);
2309 /* Perform postincrements before actually calling the function. */
2312 /* All arguments and registers used for the call must be set up by now! */
2314 /* Generate the actual call instruction. */
2315 emit_call_1 (funexp
, fndecl
, funtype
, unadjusted_args_size
,
2316 args_size
.constant
, struct_value_size
,
2317 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
2318 valreg
, old_inhibit_defer_pop
, call_fusage
, is_const
);
2320 /* If call is cse'able, make appropriate pair of reg-notes around it.
2321 Test valreg so we don't crash; may safely ignore `const'
2322 if return type is void. Disable for PARALLEL return values, because
2323 we have no way to move such values into a pseudo register. */
2324 if (is_const
&& valreg
!= 0 && GET_CODE (valreg
) != PARALLEL
)
2327 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
2330 /* Mark the return value as a pointer if needed. */
2331 if (TREE_CODE (TREE_TYPE (exp
)) == POINTER_TYPE
)
2333 tree pointed_to
= TREE_TYPE (TREE_TYPE (exp
));
2334 mark_reg_pointer (temp
, TYPE_ALIGN (pointed_to
) / BITS_PER_UNIT
);
2337 /* Construct an "equal form" for the value which mentions all the
2338 arguments in order as well as the function name. */
2339 #ifdef PUSH_ARGS_REVERSED
2340 for (i
= 0; i
< num_actuals
; i
++)
2341 note
= gen_rtx_EXPR_LIST (VOIDmode
, args
[i
].initial_value
, note
);
2343 for (i
= num_actuals
- 1; i
>= 0; i
--)
2344 note
= gen_rtx_EXPR_LIST (VOIDmode
, args
[i
].initial_value
, note
);
2346 note
= gen_rtx_EXPR_LIST (VOIDmode
, funexp
, note
);
2348 insns
= get_insns ();
2351 emit_libcall_block (insns
, temp
, valreg
, note
);
2357 /* Otherwise, just write out the sequence without a note. */
2358 rtx insns
= get_insns ();
2365 rtx temp
= gen_reg_rtx (GET_MODE (valreg
));
2368 /* The return value from a malloc-like function is a pointer. */
2369 if (TREE_CODE (TREE_TYPE (exp
)) == POINTER_TYPE
)
2370 mark_reg_pointer (temp
, BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
);
2372 emit_move_insn (temp
, valreg
);
2374 /* The return value from a malloc-like function can not alias
2376 last
= get_last_insn ();
2378 gen_rtx_EXPR_LIST (REG_NOALIAS
, temp
, REG_NOTES (last
));
2380 /* Write out the sequence. */
2381 insns
= get_insns ();
2387 /* For calls to `setjmp', etc., inform flow.c it should complain
2388 if nonvolatile values are live. */
2392 emit_note (name
, NOTE_INSN_SETJMP
);
2393 current_function_calls_setjmp
= 1;
2397 current_function_calls_longjmp
= 1;
2399 /* Notice functions that cannot return.
2400 If optimizing, insns emitted below will be dead.
2401 If not optimizing, they will exist, which is useful
2402 if the user uses the `return' command in the debugger. */
2404 if (is_volatile
|| is_longjmp
)
2407 /* If value type not void, return an rtx for the value. */
2409 /* If there are cleanups to be called, don't use a hard reg as target.
2410 We need to double check this and see if it matters anymore. */
2411 if (any_pending_cleanups (1)
2412 && target
&& REG_P (target
)
2413 && REGNO (target
) < FIRST_PSEUDO_REGISTER
)
2416 if (TYPE_MODE (TREE_TYPE (exp
)) == VOIDmode
2419 target
= const0_rtx
;
2421 else if (structure_value_addr
)
2423 if (target
== 0 || GET_CODE (target
) != MEM
)
2425 target
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp
)),
2426 memory_address (TYPE_MODE (TREE_TYPE (exp
)),
2427 structure_value_addr
));
2428 MEM_SET_IN_STRUCT_P (target
,
2429 AGGREGATE_TYPE_P (TREE_TYPE (exp
)));
2432 else if (pcc_struct_value
)
2434 /* This is the special C++ case where we need to
2435 know what the true target was. We take care to
2436 never use this value more than once in one expression. */
2437 target
= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp
)),
2438 copy_to_reg (valreg
));
2439 MEM_SET_IN_STRUCT_P (target
, AGGREGATE_TYPE_P (TREE_TYPE (exp
)));
2441 /* Handle calls that return values in multiple non-contiguous locations.
2442 The Irix 6 ABI has examples of this. */
2443 else if (GET_CODE (valreg
) == PARALLEL
)
2445 int bytes
= int_size_in_bytes (TREE_TYPE (exp
));
2449 target
= assign_stack_temp (TYPE_MODE (TREE_TYPE (exp
)), bytes
, 0);
2450 MEM_SET_IN_STRUCT_P (target
, AGGREGATE_TYPE_P (TREE_TYPE (exp
)));
2451 preserve_temp_slots (target
);
2454 if (! rtx_equal_p (target
, valreg
))
2455 emit_group_store (target
, valreg
, bytes
,
2456 TYPE_ALIGN (TREE_TYPE (exp
)) / BITS_PER_UNIT
);
2458 else if (target
&& GET_MODE (target
) == TYPE_MODE (TREE_TYPE (exp
))
2459 && GET_MODE (target
) == GET_MODE (valreg
))
2460 /* TARGET and VALREG cannot be equal at this point because the latter
2461 would not have REG_FUNCTION_VALUE_P true, while the former would if
2462 it were referring to the same register.
2464 If they refer to the same register, this move will be a no-op, except
2465 when function inlining is being done. */
2466 emit_move_insn (target
, valreg
);
2467 else if (TYPE_MODE (TREE_TYPE (exp
)) == BLKmode
)
2468 target
= copy_blkmode_from_reg (target
, valreg
, TREE_TYPE (exp
));
2470 target
= copy_to_reg (valreg
);
2472 #ifdef PROMOTE_FUNCTION_RETURN
2473 /* If we promoted this return value, make the proper SUBREG. TARGET
2474 might be const0_rtx here, so be careful. */
2475 if (GET_CODE (target
) == REG
2476 && TYPE_MODE (TREE_TYPE (exp
)) != BLKmode
2477 && GET_MODE (target
) != TYPE_MODE (TREE_TYPE (exp
)))
2479 tree type
= TREE_TYPE (exp
);
2480 int unsignedp
= TREE_UNSIGNED (type
);
2482 /* If we don't promote as expected, something is wrong. */
2483 if (GET_MODE (target
)
2484 != promote_mode (type
, TYPE_MODE (type
), &unsignedp
, 1))
2487 target
= gen_rtx_SUBREG (TYPE_MODE (type
), target
, 0);
2488 SUBREG_PROMOTED_VAR_P (target
) = 1;
2489 SUBREG_PROMOTED_UNSIGNED_P (target
) = unsignedp
;
2493 /* If size of args is variable or this was a constructor call for a stack
2494 argument, restore saved stack-pointer value. */
2496 if (old_stack_level
)
2498 emit_stack_restore (SAVE_BLOCK
, old_stack_level
, NULL_RTX
);
2499 pending_stack_adjust
= old_pending_adj
;
2500 #ifdef ACCUMULATE_OUTGOING_ARGS
2501 stack_arg_under_construction
= old_stack_arg_under_construction
;
2502 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
2503 stack_usage_map
= initial_stack_usage_map
;
2506 #ifdef ACCUMULATE_OUTGOING_ARGS
2509 #ifdef REG_PARM_STACK_SPACE
2511 restore_fixed_argument_area (save_area
, argblock
,
2512 high_to_save
, low_to_save
);
2515 /* If we saved any argument areas, restore them. */
2516 for (i
= 0; i
< num_actuals
; i
++)
2517 if (args
[i
].save_area
)
2519 enum machine_mode save_mode
= GET_MODE (args
[i
].save_area
);
2521 = gen_rtx_MEM (save_mode
,
2522 memory_address (save_mode
,
2523 XEXP (args
[i
].stack_slot
, 0)));
2525 if (save_mode
!= BLKmode
)
2526 emit_move_insn (stack_area
, args
[i
].save_area
);
2528 emit_block_move (stack_area
, validize_mem (args
[i
].save_area
),
2529 GEN_INT (args
[i
].size
.constant
),
2530 PARM_BOUNDARY
/ BITS_PER_UNIT
);
2533 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
2534 stack_usage_map
= initial_stack_usage_map
;
2538 /* If this was alloca, record the new stack level for nonlocal gotos.
2539 Check for the handler slots since we might not have a save area
2540 for non-local gotos. */
2542 if (may_be_alloca
&& nonlocal_goto_handler_slots
!= 0)
2543 emit_stack_save (SAVE_NONLOCAL
, &nonlocal_goto_stack_level
, NULL_RTX
);
2547 /* Free up storage we no longer need. */
2548 for (i
= 0; i
< num_actuals
; ++i
)
2549 if (args
[i
].aligned_regs
)
2550 free (args
[i
].aligned_regs
);
2555 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2556 (emitting the queue unless NO_QUEUE is nonzero),
2557 for a value of mode OUTMODE,
2558 with NARGS different arguments, passed as alternating rtx values
2559 and machine_modes to convert them to.
2560 The rtx values should have been passed through protect_from_queue already.
2562 NO_QUEUE will be true if and only if the library call is a `const' call
2563 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2564 to the variable is_const in expand_call.
2566 NO_QUEUE must be true for const calls, because if it isn't, then
2567 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2568 and will be lost if the libcall sequence is optimized away.
2570 NO_QUEUE must be false for non-const calls, because if it isn't, the
2571 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2572 optimized. For instance, the instruction scheduler may incorrectly
2573 move memory references across the non-const call. */
2576 emit_library_call
VPROTO((rtx orgfun
, int no_queue
, enum machine_mode outmode
,
2579 #ifndef ANSI_PROTOTYPES
2582 enum machine_mode outmode
;
2586 /* Total size in bytes of all the stack-parms scanned so far. */
2587 struct args_size args_size
;
2588 /* Size of arguments before any adjustments (such as rounding). */
2589 struct args_size original_args_size
;
2590 register int argnum
;
2594 struct args_size alignment_pad
;
2596 CUMULATIVE_ARGS args_so_far
;
2597 struct arg
{ rtx value
; enum machine_mode mode
; rtx reg
; int partial
;
2598 struct args_size offset
; struct args_size size
; rtx save_area
; };
2600 int old_inhibit_defer_pop
= inhibit_defer_pop
;
2601 rtx call_fusage
= 0;
2602 int reg_parm_stack_space
= 0;
2603 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2604 /* Define the boundary of the register parm stack space that needs to be
2606 int low_to_save
= -1, high_to_save
= 0;
2607 rtx save_area
= 0; /* Place that it is saved */
2610 #ifdef ACCUMULATE_OUTGOING_ARGS
2611 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
2612 char *initial_stack_usage_map
= stack_usage_map
;
2616 #ifdef REG_PARM_STACK_SPACE
2617 /* Size of the stack reserved for parameter registers. */
2618 #ifdef MAYBE_REG_PARM_STACK_SPACE
2619 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
2621 reg_parm_stack_space
= REG_PARM_STACK_SPACE ((tree
) 0);
2625 VA_START (p
, nargs
);
2627 #ifndef ANSI_PROTOTYPES
2628 orgfun
= va_arg (p
, rtx
);
2629 no_queue
= va_arg (p
, int);
2630 outmode
= va_arg (p
, enum machine_mode
);
2631 nargs
= va_arg (p
, int);
2636 /* Copy all the libcall-arguments out of the varargs data
2637 and into a vector ARGVEC.
2639 Compute how to pass each argument. We only support a very small subset
2640 of the full argument passing conventions to limit complexity here since
2641 library functions shouldn't have many args. */
2643 argvec
= (struct arg
*) alloca (nargs
* sizeof (struct arg
));
2644 bzero ((char *) argvec
, nargs
* sizeof (struct arg
));
2647 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
, fun
, 0);
2649 args_size
.constant
= 0;
2654 for (count
= 0; count
< nargs
; count
++)
2656 rtx val
= va_arg (p
, rtx
);
2657 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
2659 /* We cannot convert the arg value to the mode the library wants here;
2660 must do it earlier where we know the signedness of the arg. */
2662 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
2665 /* On some machines, there's no way to pass a float to a library fcn.
2666 Pass it as a double instead. */
2667 #ifdef LIBGCC_NEEDS_DOUBLE
2668 if (LIBGCC_NEEDS_DOUBLE
&& mode
== SFmode
)
2669 val
= convert_modes (DFmode
, SFmode
, val
, 0), mode
= DFmode
;
2672 /* There's no need to call protect_from_queue, because
2673 either emit_move_insn or emit_push_insn will do that. */
2675 /* Make sure it is a reasonable operand for a move or push insn. */
2676 if (GET_CODE (val
) != REG
&& GET_CODE (val
) != MEM
2677 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
2678 val
= force_operand (val
, NULL_RTX
);
2680 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2681 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, mode
, NULL_TREE
, 1))
2683 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2684 be viewed as just an efficiency improvement. */
2685 rtx slot
= assign_stack_temp (mode
, GET_MODE_SIZE (mode
), 0);
2686 emit_move_insn (slot
, val
);
2687 val
= force_operand (XEXP (slot
, 0), NULL_RTX
);
2692 argvec
[count
].value
= val
;
2693 argvec
[count
].mode
= mode
;
2695 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
2696 if (argvec
[count
].reg
&& GET_CODE (argvec
[count
].reg
) == PARALLEL
)
2698 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2699 argvec
[count
].partial
2700 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
2702 argvec
[count
].partial
= 0;
2705 locate_and_pad_parm (mode
, NULL_TREE
,
2706 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
2707 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
2708 &argvec
[count
].size
, &alignment_pad
);
2710 if (argvec
[count
].size
.var
)
2713 if (reg_parm_stack_space
== 0 && argvec
[count
].partial
)
2714 argvec
[count
].size
.constant
-= argvec
[count
].partial
* UNITS_PER_WORD
;
2716 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
2717 || reg_parm_stack_space
> 0)
2718 args_size
.constant
+= argvec
[count
].size
.constant
;
2720 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
) 0, 1);
2724 #ifdef FINAL_REG_PARM_STACK_SPACE
2725 reg_parm_stack_space
= FINAL_REG_PARM_STACK_SPACE (args_size
.constant
,
2729 /* If this machine requires an external definition for library
2730 functions, write one out. */
2731 assemble_external_libcall (fun
);
2733 original_args_size
= args_size
;
2734 #ifdef PREFERRED_STACK_BOUNDARY
2735 args_size
.constant
= (((args_size
.constant
+ (STACK_BYTES
- 1))
2736 / STACK_BYTES
) * STACK_BYTES
);
2739 args_size
.constant
= MAX (args_size
.constant
,
2740 reg_parm_stack_space
);
2742 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2743 args_size
.constant
-= reg_parm_stack_space
;
2746 if (args_size
.constant
> current_function_outgoing_args_size
)
2747 current_function_outgoing_args_size
= args_size
.constant
;
2749 #ifdef ACCUMULATE_OUTGOING_ARGS
2750 /* Since the stack pointer will never be pushed, it is possible for
2751 the evaluation of a parm to clobber something we have already
2752 written to the stack. Since most function calls on RISC machines
2753 do not use the stack, this is uncommon, but must work correctly.
2755 Therefore, we save any area of the stack that was already written
2756 and that we are using. Here we set up to do this by making a new
2757 stack usage map from the old one.
2759 Another approach might be to try to reorder the argument
2760 evaluations to avoid this conflicting stack usage. */
2762 needed
= args_size
.constant
;
2764 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2765 /* Since we will be writing into the entire argument area, the
2766 map must be allocated for its entire size, not just the part that
2767 is the responsibility of the caller. */
2768 needed
+= reg_parm_stack_space
;
2771 #ifdef ARGS_GROW_DOWNWARD
2772 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2775 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
2778 stack_usage_map
= (char *) alloca (highest_outgoing_arg_in_use
);
2780 if (initial_highest_arg_in_use
)
2781 bcopy (initial_stack_usage_map
, stack_usage_map
,
2782 initial_highest_arg_in_use
);
2784 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
2785 bzero (&stack_usage_map
[initial_highest_arg_in_use
],
2786 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
2789 /* The address of the outgoing argument list must not be copied to a
2790 register here, because argblock would be left pointing to the
2791 wrong place after the call to allocate_dynamic_stack_space below.
2794 argblock
= virtual_outgoing_args_rtx
;
2795 #else /* not ACCUMULATE_OUTGOING_ARGS */
2796 #ifndef PUSH_ROUNDING
2797 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
2801 #ifdef PUSH_ARGS_REVERSED
2802 #ifdef PREFERRED_STACK_BOUNDARY
2803 /* If we push args individually in reverse order, perform stack alignment
2804 before the first push (the last arg). */
2806 anti_adjust_stack (GEN_INT (args_size
.constant
2807 - original_args_size
.constant
));
2811 #ifdef PUSH_ARGS_REVERSED
2819 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2820 /* The argument list is the property of the called routine and it
2821 may clobber it. If the fixed area has been used for previous
2822 parameters, we must save and restore it.
2824 Here we compute the boundary of the area that needs to be saved, if any. */
2826 #ifdef ARGS_GROW_DOWNWARD
2827 for (count
= 0; count
< reg_parm_stack_space
+ 1; count
++)
2829 for (count
= 0; count
< reg_parm_stack_space
; count
++)
2832 if (count
>= highest_outgoing_arg_in_use
2833 || stack_usage_map
[count
] == 0)
2836 if (low_to_save
== -1)
2837 low_to_save
= count
;
2839 high_to_save
= count
;
2842 if (low_to_save
>= 0)
2844 int num_to_save
= high_to_save
- low_to_save
+ 1;
2845 enum machine_mode save_mode
2846 = mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
2849 /* If we don't have the required alignment, must do this in BLKmode. */
2850 if ((low_to_save
& (MIN (GET_MODE_SIZE (save_mode
),
2851 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
2852 save_mode
= BLKmode
;
2854 #ifdef ARGS_GROW_DOWNWARD
2855 stack_area
= gen_rtx_MEM (save_mode
,
2856 memory_address (save_mode
,
2857 plus_constant (argblock
,
2860 stack_area
= gen_rtx_MEM (save_mode
,
2861 memory_address (save_mode
,
2862 plus_constant (argblock
,
2865 if (save_mode
== BLKmode
)
2867 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
2868 emit_block_move (validize_mem (save_area
), stack_area
,
2869 GEN_INT (num_to_save
),
2870 PARM_BOUNDARY
/ BITS_PER_UNIT
);
2874 save_area
= gen_reg_rtx (save_mode
);
2875 emit_move_insn (save_area
, stack_area
);
2880 /* Push the args that need to be pushed. */
2882 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2883 are to be pushed. */
2884 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
2886 register enum machine_mode mode
= argvec
[argnum
].mode
;
2887 register rtx val
= argvec
[argnum
].value
;
2888 rtx reg
= argvec
[argnum
].reg
;
2889 int partial
= argvec
[argnum
].partial
;
2890 #ifdef ACCUMULATE_OUTGOING_ARGS
2891 int lower_bound
, upper_bound
, i
;
2894 if (! (reg
!= 0 && partial
== 0))
2896 #ifdef ACCUMULATE_OUTGOING_ARGS
2897 /* If this is being stored into a pre-allocated, fixed-size, stack
2898 area, save any previous data at that location. */
2900 #ifdef ARGS_GROW_DOWNWARD
2901 /* stack_slot is negative, but we want to index stack_usage_map
2902 with positive values. */
2903 upper_bound
= -argvec
[argnum
].offset
.constant
+ 1;
2904 lower_bound
= upper_bound
- argvec
[argnum
].size
.constant
;
2906 lower_bound
= argvec
[argnum
].offset
.constant
;
2907 upper_bound
= lower_bound
+ argvec
[argnum
].size
.constant
;
2910 for (i
= lower_bound
; i
< upper_bound
; i
++)
2911 if (stack_usage_map
[i
]
2912 /* Don't store things in the fixed argument area at this point;
2913 it has already been saved. */
2914 && i
> reg_parm_stack_space
)
2917 if (i
!= upper_bound
)
2919 /* We need to make a save area. See what mode we can make it. */
2920 enum machine_mode save_mode
2921 = mode_for_size (argvec
[argnum
].size
.constant
* BITS_PER_UNIT
,
2928 plus_constant (argblock
,
2929 argvec
[argnum
].offset
.constant
)));
2931 argvec
[argnum
].save_area
= gen_reg_rtx (save_mode
);
2932 emit_move_insn (argvec
[argnum
].save_area
, stack_area
);
2935 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, 0, partial
, reg
, 0,
2936 argblock
, GEN_INT (argvec
[argnum
].offset
.constant
),
2937 reg_parm_stack_space
, ARGS_SIZE_RTX (alignment_pad
));
2939 #ifdef ACCUMULATE_OUTGOING_ARGS
2940 /* Now mark the segment we just used. */
2941 for (i
= lower_bound
; i
< upper_bound
; i
++)
2942 stack_usage_map
[i
] = 1;
2949 #ifndef PUSH_ARGS_REVERSED
2950 #ifdef PREFERRED_STACK_BOUNDARY
2951 /* If we pushed args in forward order, perform stack alignment
2952 after pushing the last arg. */
2954 anti_adjust_stack (GEN_INT (args_size
.constant
2955 - original_args_size
.constant
));
2959 #ifdef PUSH_ARGS_REVERSED
2965 fun
= prepare_call_address (fun
, NULL_TREE
, &call_fusage
, 0);
2967 /* Now load any reg parms into their regs. */
2969 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2970 are to be pushed. */
2971 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
2973 register rtx val
= argvec
[argnum
].value
;
2974 rtx reg
= argvec
[argnum
].reg
;
2975 int partial
= argvec
[argnum
].partial
;
2977 if (reg
!= 0 && partial
== 0)
2978 emit_move_insn (reg
, val
);
2982 /* For version 1.37, try deleting this entirely. */
2986 /* Any regs containing parms remain in use through the call. */
2987 for (count
= 0; count
< nargs
; count
++)
2988 if (argvec
[count
].reg
!= 0)
2989 use_reg (&call_fusage
, argvec
[count
].reg
);
2991 /* Don't allow popping to be deferred, since then
2992 cse'ing of library calls could delete a call and leave the pop. */
2995 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2996 will set inhibit_defer_pop to that value. */
2998 /* The return type is needed to decide how many bytes the function pops.
2999 Signedness plays no role in that, so for simplicity, we pretend it's
3000 always signed. We also assume that the list of arguments passed has
3001 no impact, so we pretend it is unknown. */
3004 get_identifier (XSTR (orgfun
, 0)),
3005 build_function_type (outmode
== VOIDmode
? void_type_node
3006 : type_for_mode (outmode
, 0), NULL_TREE
),
3007 original_args_size
.constant
, args_size
.constant
, 0,
3008 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
3009 outmode
!= VOIDmode
? hard_libcall_value (outmode
) : NULL_RTX
,
3010 old_inhibit_defer_pop
+ 1, call_fusage
, no_queue
);
3014 /* Now restore inhibit_defer_pop to its actual original value. */
3017 #ifdef ACCUMULATE_OUTGOING_ARGS
3018 #ifdef REG_PARM_STACK_SPACE
3021 enum machine_mode save_mode
= GET_MODE (save_area
);
3022 #ifdef ARGS_GROW_DOWNWARD
3024 = gen_rtx_MEM (save_mode
,
3025 memory_address (save_mode
,
3026 plus_constant (argblock
,
3030 = gen_rtx_MEM (save_mode
,
3031 memory_address (save_mode
,
3032 plus_constant (argblock
, low_to_save
)));
3035 if (save_mode
!= BLKmode
)
3036 emit_move_insn (stack_area
, save_area
);
3038 emit_block_move (stack_area
, validize_mem (save_area
),
3039 GEN_INT (high_to_save
- low_to_save
+ 1),
3040 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3044 /* If we saved any argument areas, restore them. */
3045 for (count
= 0; count
< nargs
; count
++)
3046 if (argvec
[count
].save_area
)
3048 enum machine_mode save_mode
= GET_MODE (argvec
[count
].save_area
);
3050 = gen_rtx_MEM (save_mode
,
3053 plus_constant (argblock
,
3054 argvec
[count
].offset
.constant
)));
3056 emit_move_insn (stack_area
, argvec
[count
].save_area
);
3059 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
3060 stack_usage_map
= initial_stack_usage_map
;
3064 /* Like emit_library_call except that an extra argument, VALUE,
3065 comes second and says where to store the result.
3066 (If VALUE is zero, this function chooses a convenient way
3067 to return the value.
3069 This function returns an rtx for where the value is to be found.
3070 If VALUE is nonzero, VALUE is returned. */
3073 emit_library_call_value
VPROTO((rtx orgfun
, rtx value
, int no_queue
,
3074 enum machine_mode outmode
, int nargs
, ...))
3076 #ifndef ANSI_PROTOTYPES
3080 enum machine_mode outmode
;
3084 /* Total size in bytes of all the stack-parms scanned so far. */
3085 struct args_size args_size
;
3086 /* Size of arguments before any adjustments (such as rounding). */
3087 struct args_size original_args_size
;
3088 register int argnum
;
3092 struct args_size alignment_pad
;
3094 CUMULATIVE_ARGS args_so_far
;
3095 struct arg
{ rtx value
; enum machine_mode mode
; rtx reg
; int partial
;
3096 struct args_size offset
; struct args_size size
; rtx save_area
; };
3098 int old_inhibit_defer_pop
= inhibit_defer_pop
;
3099 rtx call_fusage
= 0;
3101 int pcc_struct_value
= 0;
3102 int struct_value_size
= 0;
3104 int reg_parm_stack_space
= 0;
3105 #ifdef ACCUMULATE_OUTGOING_ARGS
3109 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3110 /* Define the boundary of the register parm stack space that needs to be
3112 int low_to_save
= -1, high_to_save
= 0;
3113 rtx save_area
= 0; /* Place that it is saved */
3116 #ifdef ACCUMULATE_OUTGOING_ARGS
3117 /* Size of the stack reserved for parameter registers. */
3118 int initial_highest_arg_in_use
= highest_outgoing_arg_in_use
;
3119 char *initial_stack_usage_map
= stack_usage_map
;
3122 #ifdef REG_PARM_STACK_SPACE
3123 #ifdef MAYBE_REG_PARM_STACK_SPACE
3124 reg_parm_stack_space
= MAYBE_REG_PARM_STACK_SPACE
;
3126 reg_parm_stack_space
= REG_PARM_STACK_SPACE ((tree
) 0);
3130 VA_START (p
, nargs
);
3132 #ifndef ANSI_PROTOTYPES
3133 orgfun
= va_arg (p
, rtx
);
3134 value
= va_arg (p
, rtx
);
3135 no_queue
= va_arg (p
, int);
3136 outmode
= va_arg (p
, enum machine_mode
);
3137 nargs
= va_arg (p
, int);
3140 is_const
= no_queue
;
3143 /* If this kind of value comes back in memory,
3144 decide where in memory it should come back. */
3145 if (aggregate_value_p (type_for_mode (outmode
, 0)))
3147 #ifdef PCC_STATIC_STRUCT_RETURN
3149 = hard_function_value (build_pointer_type (type_for_mode (outmode
, 0)),
3151 mem_value
= gen_rtx_MEM (outmode
, pointer_reg
);
3152 pcc_struct_value
= 1;
3154 value
= gen_reg_rtx (outmode
);
3155 #else /* not PCC_STATIC_STRUCT_RETURN */
3156 struct_value_size
= GET_MODE_SIZE (outmode
);
3157 if (value
!= 0 && GET_CODE (value
) == MEM
)
3160 mem_value
= assign_stack_temp (outmode
, GET_MODE_SIZE (outmode
), 0);
3163 /* This call returns a big structure. */
3167 /* ??? Unfinished: must pass the memory address as an argument. */
3169 /* Copy all the libcall-arguments out of the varargs data
3170 and into a vector ARGVEC.
3172 Compute how to pass each argument. We only support a very small subset
3173 of the full argument passing conventions to limit complexity here since
3174 library functions shouldn't have many args. */
3176 argvec
= (struct arg
*) alloca ((nargs
+ 1) * sizeof (struct arg
));
3177 bzero ((char *) argvec
, (nargs
+ 1) * sizeof (struct arg
));
3179 INIT_CUMULATIVE_ARGS (args_so_far
, NULL_TREE
, fun
, 0);
3181 args_size
.constant
= 0;
3188 /* If there's a structure value address to be passed,
3189 either pass it in the special place, or pass it as an extra argument. */
3190 if (mem_value
&& struct_value_rtx
== 0 && ! pcc_struct_value
)
3192 rtx addr
= XEXP (mem_value
, 0);
3195 /* Make sure it is a reasonable operand for a move or push insn. */
3196 if (GET_CODE (addr
) != REG
&& GET_CODE (addr
) != MEM
3197 && ! (CONSTANT_P (addr
) && LEGITIMATE_CONSTANT_P (addr
)))
3198 addr
= force_operand (addr
, NULL_RTX
);
3200 argvec
[count
].value
= addr
;
3201 argvec
[count
].mode
= Pmode
;
3202 argvec
[count
].partial
= 0;
3204 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, Pmode
, NULL_TREE
, 1);
3205 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3206 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, Pmode
, NULL_TREE
, 1))
3210 locate_and_pad_parm (Pmode
, NULL_TREE
,
3211 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
3212 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
3213 &argvec
[count
].size
, &alignment_pad
);
3216 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
3217 || reg_parm_stack_space
> 0)
3218 args_size
.constant
+= argvec
[count
].size
.constant
;
3220 FUNCTION_ARG_ADVANCE (args_so_far
, Pmode
, (tree
) 0, 1);
3225 for (; count
< nargs
; count
++)
3227 rtx val
= va_arg (p
, rtx
);
3228 enum machine_mode mode
= va_arg (p
, enum machine_mode
);
3230 /* We cannot convert the arg value to the mode the library wants here;
3231 must do it earlier where we know the signedness of the arg. */
3233 || (GET_MODE (val
) != mode
&& GET_MODE (val
) != VOIDmode
))
3236 /* On some machines, there's no way to pass a float to a library fcn.
3237 Pass it as a double instead. */
3238 #ifdef LIBGCC_NEEDS_DOUBLE
3239 if (LIBGCC_NEEDS_DOUBLE
&& mode
== SFmode
)
3240 val
= convert_modes (DFmode
, SFmode
, val
, 0), mode
= DFmode
;
3243 /* There's no need to call protect_from_queue, because
3244 either emit_move_insn or emit_push_insn will do that. */
3246 /* Make sure it is a reasonable operand for a move or push insn. */
3247 if (GET_CODE (val
) != REG
&& GET_CODE (val
) != MEM
3248 && ! (CONSTANT_P (val
) && LEGITIMATE_CONSTANT_P (val
)))
3249 val
= force_operand (val
, NULL_RTX
);
3251 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3252 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far
, mode
, NULL_TREE
, 1))
3254 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3255 be viewed as just an efficiency improvement. */
3256 rtx slot
= assign_stack_temp (mode
, GET_MODE_SIZE (mode
), 0);
3257 emit_move_insn (slot
, val
);
3258 val
= XEXP (slot
, 0);
3263 argvec
[count
].value
= val
;
3264 argvec
[count
].mode
= mode
;
3266 argvec
[count
].reg
= FUNCTION_ARG (args_so_far
, mode
, NULL_TREE
, 1);
3267 if (argvec
[count
].reg
&& GET_CODE (argvec
[count
].reg
) == PARALLEL
)
3269 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3270 argvec
[count
].partial
3271 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far
, mode
, NULL_TREE
, 1);
3273 argvec
[count
].partial
= 0;
3276 locate_and_pad_parm (mode
, NULL_TREE
,
3277 argvec
[count
].reg
&& argvec
[count
].partial
== 0,
3278 NULL_TREE
, &args_size
, &argvec
[count
].offset
,
3279 &argvec
[count
].size
, &alignment_pad
);
3281 if (argvec
[count
].size
.var
)
3284 if (reg_parm_stack_space
== 0 && argvec
[count
].partial
)
3285 argvec
[count
].size
.constant
-= argvec
[count
].partial
* UNITS_PER_WORD
;
3287 if (argvec
[count
].reg
== 0 || argvec
[count
].partial
!= 0
3288 || reg_parm_stack_space
> 0)
3289 args_size
.constant
+= argvec
[count
].size
.constant
;
3291 FUNCTION_ARG_ADVANCE (args_so_far
, mode
, (tree
) 0, 1);
3295 #ifdef FINAL_REG_PARM_STACK_SPACE
3296 reg_parm_stack_space
= FINAL_REG_PARM_STACK_SPACE (args_size
.constant
,
3299 /* If this machine requires an external definition for library
3300 functions, write one out. */
3301 assemble_external_libcall (fun
);
3303 original_args_size
= args_size
;
3304 #ifdef PREFERRED_STACK_BOUNDARY
3305 args_size
.constant
= (((args_size
.constant
+ (STACK_BYTES
- 1))
3306 / STACK_BYTES
) * STACK_BYTES
);
3309 args_size
.constant
= MAX (args_size
.constant
,
3310 reg_parm_stack_space
);
3312 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3313 args_size
.constant
-= reg_parm_stack_space
;
3316 if (args_size
.constant
> current_function_outgoing_args_size
)
3317 current_function_outgoing_args_size
= args_size
.constant
;
3319 #ifdef ACCUMULATE_OUTGOING_ARGS
3320 /* Since the stack pointer will never be pushed, it is possible for
3321 the evaluation of a parm to clobber something we have already
3322 written to the stack. Since most function calls on RISC machines
3323 do not use the stack, this is uncommon, but must work correctly.
3325 Therefore, we save any area of the stack that was already written
3326 and that we are using. Here we set up to do this by making a new
3327 stack usage map from the old one.
3329 Another approach might be to try to reorder the argument
3330 evaluations to avoid this conflicting stack usage. */
3332 needed
= args_size
.constant
;
3334 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3335 /* Since we will be writing into the entire argument area, the
3336 map must be allocated for its entire size, not just the part that
3337 is the responsibility of the caller. */
3338 needed
+= reg_parm_stack_space
;
3341 #ifdef ARGS_GROW_DOWNWARD
3342 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
3345 highest_outgoing_arg_in_use
= MAX (initial_highest_arg_in_use
,
3348 stack_usage_map
= (char *) alloca (highest_outgoing_arg_in_use
);
3350 if (initial_highest_arg_in_use
)
3351 bcopy (initial_stack_usage_map
, stack_usage_map
,
3352 initial_highest_arg_in_use
);
3354 if (initial_highest_arg_in_use
!= highest_outgoing_arg_in_use
)
3355 bzero (&stack_usage_map
[initial_highest_arg_in_use
],
3356 highest_outgoing_arg_in_use
- initial_highest_arg_in_use
);
3359 /* The address of the outgoing argument list must not be copied to a
3360 register here, because argblock would be left pointing to the
3361 wrong place after the call to allocate_dynamic_stack_space below.
3364 argblock
= virtual_outgoing_args_rtx
;
3365 #else /* not ACCUMULATE_OUTGOING_ARGS */
3366 #ifndef PUSH_ROUNDING
3367 argblock
= push_block (GEN_INT (args_size
.constant
), 0, 0);
3371 #ifdef PUSH_ARGS_REVERSED
3372 #ifdef PREFERRED_STACK_BOUNDARY
3373 /* If we push args individually in reverse order, perform stack alignment
3374 before the first push (the last arg). */
3376 anti_adjust_stack (GEN_INT (args_size
.constant
3377 - original_args_size
.constant
));
3381 #ifdef PUSH_ARGS_REVERSED
3389 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3390 /* The argument list is the property of the called routine and it
3391 may clobber it. If the fixed area has been used for previous
3392 parameters, we must save and restore it.
3394 Here we compute the boundary of the area that needs to be saved, if any. */
3396 #ifdef ARGS_GROW_DOWNWARD
3397 for (count
= 0; count
< reg_parm_stack_space
+ 1; count
++)
3399 for (count
= 0; count
< reg_parm_stack_space
; count
++)
3402 if (count
>= highest_outgoing_arg_in_use
3403 || stack_usage_map
[count
] == 0)
3406 if (low_to_save
== -1)
3407 low_to_save
= count
;
3409 high_to_save
= count
;
3412 if (low_to_save
>= 0)
3414 int num_to_save
= high_to_save
- low_to_save
+ 1;
3415 enum machine_mode save_mode
3416 = mode_for_size (num_to_save
* BITS_PER_UNIT
, MODE_INT
, 1);
3419 /* If we don't have the required alignment, must do this in BLKmode. */
3420 if ((low_to_save
& (MIN (GET_MODE_SIZE (save_mode
),
3421 BIGGEST_ALIGNMENT
/ UNITS_PER_WORD
) - 1)))
3422 save_mode
= BLKmode
;
3424 #ifdef ARGS_GROW_DOWNWARD
3425 stack_area
= gen_rtx_MEM (save_mode
,
3426 memory_address (save_mode
,
3427 plus_constant (argblock
,
3430 stack_area
= gen_rtx_MEM (save_mode
,
3431 memory_address (save_mode
,
3432 plus_constant (argblock
,
3435 if (save_mode
== BLKmode
)
3437 save_area
= assign_stack_temp (BLKmode
, num_to_save
, 0);
3438 emit_block_move (validize_mem (save_area
), stack_area
,
3439 GEN_INT (num_to_save
),
3440 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3444 save_area
= gen_reg_rtx (save_mode
);
3445 emit_move_insn (save_area
, stack_area
);
3450 /* Push the args that need to be pushed. */
3452 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3453 are to be pushed. */
3454 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
3456 register enum machine_mode mode
= argvec
[argnum
].mode
;
3457 register rtx val
= argvec
[argnum
].value
;
3458 rtx reg
= argvec
[argnum
].reg
;
3459 int partial
= argvec
[argnum
].partial
;
3460 #ifdef ACCUMULATE_OUTGOING_ARGS
3461 int lower_bound
, upper_bound
, i
;
3464 if (! (reg
!= 0 && partial
== 0))
3466 #ifdef ACCUMULATE_OUTGOING_ARGS
3467 /* If this is being stored into a pre-allocated, fixed-size, stack
3468 area, save any previous data at that location. */
3470 #ifdef ARGS_GROW_DOWNWARD
3471 /* stack_slot is negative, but we want to index stack_usage_map
3472 with positive values. */
3473 upper_bound
= -argvec
[argnum
].offset
.constant
+ 1;
3474 lower_bound
= upper_bound
- argvec
[argnum
].size
.constant
;
3476 lower_bound
= argvec
[argnum
].offset
.constant
;
3477 upper_bound
= lower_bound
+ argvec
[argnum
].size
.constant
;
3480 for (i
= lower_bound
; i
< upper_bound
; i
++)
3481 if (stack_usage_map
[i
]
3482 /* Don't store things in the fixed argument area at this point;
3483 it has already been saved. */
3484 && i
> reg_parm_stack_space
)
3487 if (i
!= upper_bound
)
3489 /* We need to make a save area. See what mode we can make it. */
3490 enum machine_mode save_mode
3491 = mode_for_size (argvec
[argnum
].size
.constant
* BITS_PER_UNIT
,
3498 plus_constant (argblock
,
3499 argvec
[argnum
].offset
.constant
)));
3500 argvec
[argnum
].save_area
= gen_reg_rtx (save_mode
);
3502 emit_move_insn (argvec
[argnum
].save_area
, stack_area
);
3505 emit_push_insn (val
, mode
, NULL_TREE
, NULL_RTX
, 0, partial
, reg
, 0,
3506 argblock
, GEN_INT (argvec
[argnum
].offset
.constant
),
3507 reg_parm_stack_space
, ARGS_SIZE_RTX (alignment_pad
));
3509 #ifdef ACCUMULATE_OUTGOING_ARGS
3510 /* Now mark the segment we just used. */
3511 for (i
= lower_bound
; i
< upper_bound
; i
++)
3512 stack_usage_map
[i
] = 1;
3519 #ifndef PUSH_ARGS_REVERSED
3520 #ifdef PREFERRED_STACK_BOUNDARY
3521 /* If we pushed args in forward order, perform stack alignment
3522 after pushing the last arg. */
3524 anti_adjust_stack (GEN_INT (args_size
.constant
3525 - original_args_size
.constant
));
3529 #ifdef PUSH_ARGS_REVERSED
3535 fun
= prepare_call_address (fun
, NULL_TREE
, &call_fusage
, 0);
3537 /* Now load any reg parms into their regs. */
3539 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3540 are to be pushed. */
3541 for (count
= 0; count
< nargs
; count
++, argnum
+= inc
)
3543 register rtx val
= argvec
[argnum
].value
;
3544 rtx reg
= argvec
[argnum
].reg
;
3545 int partial
= argvec
[argnum
].partial
;
3547 if (reg
!= 0 && partial
== 0)
3548 emit_move_insn (reg
, val
);
3553 /* For version 1.37, try deleting this entirely. */
3558 /* Any regs containing parms remain in use through the call. */
3559 for (count
= 0; count
< nargs
; count
++)
3560 if (argvec
[count
].reg
!= 0)
3561 use_reg (&call_fusage
, argvec
[count
].reg
);
3563 /* Pass the function the address in which to return a structure value. */
3564 if (mem_value
!= 0 && struct_value_rtx
!= 0 && ! pcc_struct_value
)
3566 emit_move_insn (struct_value_rtx
,
3568 force_operand (XEXP (mem_value
, 0),
3570 if (GET_CODE (struct_value_rtx
) == REG
)
3571 use_reg (&call_fusage
, struct_value_rtx
);
3574 /* Don't allow popping to be deferred, since then
3575 cse'ing of library calls could delete a call and leave the pop. */
3578 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3579 will set inhibit_defer_pop to that value. */
3580 /* See the comment in emit_library_call about the function type we build
3584 get_identifier (XSTR (orgfun
, 0)),
3585 build_function_type (type_for_mode (outmode
, 0), NULL_TREE
),
3586 original_args_size
.constant
, args_size
.constant
,
3588 FUNCTION_ARG (args_so_far
, VOIDmode
, void_type_node
, 1),
3589 mem_value
== 0 ? hard_libcall_value (outmode
) : NULL_RTX
,
3590 old_inhibit_defer_pop
+ 1, call_fusage
, is_const
);
3592 /* Now restore inhibit_defer_pop to its actual original value. */
3597 /* Copy the value to the right place. */
3598 if (outmode
!= VOIDmode
)
3604 if (value
!= mem_value
)
3605 emit_move_insn (value
, mem_value
);
3607 else if (value
!= 0)
3608 emit_move_insn (value
, hard_libcall_value (outmode
));
3610 value
= hard_libcall_value (outmode
);
3613 #ifdef ACCUMULATE_OUTGOING_ARGS
3614 #ifdef REG_PARM_STACK_SPACE
3617 enum machine_mode save_mode
= GET_MODE (save_area
);
3618 #ifdef ARGS_GROW_DOWNWARD
3620 = gen_rtx_MEM (save_mode
,
3621 memory_address (save_mode
,
3622 plus_constant (argblock
,
3626 = gen_rtx_MEM (save_mode
,
3627 memory_address (save_mode
,
3628 plus_constant (argblock
, low_to_save
)));
3630 if (save_mode
!= BLKmode
)
3631 emit_move_insn (stack_area
, save_area
);
3633 emit_block_move (stack_area
, validize_mem (save_area
),
3634 GEN_INT (high_to_save
- low_to_save
+ 1),
3635 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3639 /* If we saved any argument areas, restore them. */
3640 for (count
= 0; count
< nargs
; count
++)
3641 if (argvec
[count
].save_area
)
3643 enum machine_mode save_mode
= GET_MODE (argvec
[count
].save_area
);
3645 = gen_rtx_MEM (save_mode
,
3648 plus_constant (argblock
,
3649 argvec
[count
].offset
.constant
)));
3651 emit_move_insn (stack_area
, argvec
[count
].save_area
);
3654 highest_outgoing_arg_in_use
= initial_highest_arg_in_use
;
3655 stack_usage_map
= initial_stack_usage_map
;
3662 /* Return an rtx which represents a suitable home on the stack
3663 given TYPE, the type of the argument looking for a home.
3664 This is called only for BLKmode arguments.
3666 SIZE is the size needed for this target.
3667 ARGS_ADDR is the address of the bottom of the argument block for this call.
3668 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3669 if this machine uses push insns. */
3672 target_for_arg (type
, size
, args_addr
, offset
)
3676 struct args_size offset
;
3679 rtx offset_rtx
= ARGS_SIZE_RTX (offset
);
3681 /* We do not call memory_address if possible,
3682 because we want to address as close to the stack
3683 as possible. For non-variable sized arguments,
3684 this will be stack-pointer relative addressing. */
3685 if (GET_CODE (offset_rtx
) == CONST_INT
)
3686 target
= plus_constant (args_addr
, INTVAL (offset_rtx
));
3689 /* I have no idea how to guarantee that this
3690 will work in the presence of register parameters. */
3691 target
= gen_rtx_PLUS (Pmode
, args_addr
, offset_rtx
);
3692 target
= memory_address (QImode
, target
);
3695 return gen_rtx_MEM (BLKmode
, target
);
3699 /* Store a single argument for a function call
3700 into the register or memory area where it must be passed.
3701 *ARG describes the argument value and where to pass it.
3703 ARGBLOCK is the address of the stack-block for all the arguments,
3704 or 0 on a machine where arguments are pushed individually.
3706 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3707 so must be careful about how the stack is used.
3709 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3710 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3711 that we need not worry about saving and restoring the stack.
3713 FNDECL is the declaration of the function we are calling. */
3716 store_one_arg (arg
, argblock
, may_be_alloca
, variable_size
,
3717 reg_parm_stack_space
)
3718 struct arg_data
*arg
;
3721 int variable_size ATTRIBUTE_UNUSED
;
3722 int reg_parm_stack_space
;
3724 register tree pval
= arg
->tree_value
;
3728 #ifdef ACCUMULATE_OUTGOING_ARGS
3729 int i
, lower_bound
= 0, upper_bound
= 0;
3732 if (TREE_CODE (pval
) == ERROR_MARK
)
3735 /* Push a new temporary level for any temporaries we make for
3739 #ifdef ACCUMULATE_OUTGOING_ARGS
3740 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3741 save any previous data at that location. */
3742 if (argblock
&& ! variable_size
&& arg
->stack
)
3744 #ifdef ARGS_GROW_DOWNWARD
3745 /* stack_slot is negative, but we want to index stack_usage_map
3746 with positive values. */
3747 if (GET_CODE (XEXP (arg
->stack_slot
, 0)) == PLUS
)
3748 upper_bound
= -INTVAL (XEXP (XEXP (arg
->stack_slot
, 0), 1)) + 1;
3752 lower_bound
= upper_bound
- arg
->size
.constant
;
3754 if (GET_CODE (XEXP (arg
->stack_slot
, 0)) == PLUS
)
3755 lower_bound
= INTVAL (XEXP (XEXP (arg
->stack_slot
, 0), 1));
3759 upper_bound
= lower_bound
+ arg
->size
.constant
;
3762 for (i
= lower_bound
; i
< upper_bound
; i
++)
3763 if (stack_usage_map
[i
]
3764 /* Don't store things in the fixed argument area at this point;
3765 it has already been saved. */
3766 && i
> reg_parm_stack_space
)
3769 if (i
!= upper_bound
)
3771 /* We need to make a save area. See what mode we can make it. */
3772 enum machine_mode save_mode
3773 = mode_for_size (arg
->size
.constant
* BITS_PER_UNIT
, MODE_INT
, 1);
3775 = gen_rtx_MEM (save_mode
,
3776 memory_address (save_mode
,
3777 XEXP (arg
->stack_slot
, 0)));
3779 if (save_mode
== BLKmode
)
3781 arg
->save_area
= assign_stack_temp (BLKmode
,
3782 arg
->size
.constant
, 0);
3783 MEM_SET_IN_STRUCT_P (arg
->save_area
,
3784 AGGREGATE_TYPE_P (TREE_TYPE
3785 (arg
->tree_value
)));
3786 preserve_temp_slots (arg
->save_area
);
3787 emit_block_move (validize_mem (arg
->save_area
), stack_area
,
3788 GEN_INT (arg
->size
.constant
),
3789 PARM_BOUNDARY
/ BITS_PER_UNIT
);
3793 arg
->save_area
= gen_reg_rtx (save_mode
);
3794 emit_move_insn (arg
->save_area
, stack_area
);
3799 /* Now that we have saved any slots that will be overwritten by this
3800 store, mark all slots this store will use. We must do this before
3801 we actually expand the argument since the expansion itself may
3802 trigger library calls which might need to use the same stack slot. */
3803 if (argblock
&& ! variable_size
&& arg
->stack
)
3804 for (i
= lower_bound
; i
< upper_bound
; i
++)
3805 stack_usage_map
[i
] = 1;
3808 /* If this isn't going to be placed on both the stack and in registers,
3809 set up the register and number of words. */
3810 if (! arg
->pass_on_stack
)
3811 reg
= arg
->reg
, partial
= arg
->partial
;
3813 if (reg
!= 0 && partial
== 0)
3814 /* Being passed entirely in a register. We shouldn't be called in
3818 /* If this arg needs special alignment, don't load the registers
3820 if (arg
->n_aligned_regs
!= 0)
3823 /* If this is being passed partially in a register, we can't evaluate
3824 it directly into its stack slot. Otherwise, we can. */
3825 if (arg
->value
== 0)
3827 #ifdef ACCUMULATE_OUTGOING_ARGS
3828 /* stack_arg_under_construction is nonzero if a function argument is
3829 being evaluated directly into the outgoing argument list and
3830 expand_call must take special action to preserve the argument list
3831 if it is called recursively.
3833 For scalar function arguments stack_usage_map is sufficient to
3834 determine which stack slots must be saved and restored. Scalar
3835 arguments in general have pass_on_stack == 0.
3837 If this argument is initialized by a function which takes the
3838 address of the argument (a C++ constructor or a C function
3839 returning a BLKmode structure), then stack_usage_map is
3840 insufficient and expand_call must push the stack around the
3841 function call. Such arguments have pass_on_stack == 1.
3843 Note that it is always safe to set stack_arg_under_construction,
3844 but this generates suboptimal code if set when not needed. */
3846 if (arg
->pass_on_stack
)
3847 stack_arg_under_construction
++;
3849 arg
->value
= expand_expr (pval
,
3851 || TYPE_MODE (TREE_TYPE (pval
)) != arg
->mode
)
3852 ? NULL_RTX
: arg
->stack
,
3855 /* If we are promoting object (or for any other reason) the mode
3856 doesn't agree, convert the mode. */
3858 if (arg
->mode
!= TYPE_MODE (TREE_TYPE (pval
)))
3859 arg
->value
= convert_modes (arg
->mode
, TYPE_MODE (TREE_TYPE (pval
)),
3860 arg
->value
, arg
->unsignedp
);
3862 #ifdef ACCUMULATE_OUTGOING_ARGS
3863 if (arg
->pass_on_stack
)
3864 stack_arg_under_construction
--;
3868 /* Don't allow anything left on stack from computation
3869 of argument to alloca. */
3871 do_pending_stack_adjust ();
3873 if (arg
->value
== arg
->stack
)
3875 /* If the value is already in the stack slot, we are done. */
3876 if (current_function_check_memory_usage
&& GET_CODE (arg
->stack
) == MEM
)
3878 emit_library_call (chkr_set_right_libfunc
, 1, VOIDmode
, 3,
3879 XEXP (arg
->stack
, 0), Pmode
,
3880 ARGS_SIZE_RTX (arg
->size
),
3881 TYPE_MODE (sizetype
),
3882 GEN_INT (MEMORY_USE_RW
),
3883 TYPE_MODE (integer_type_node
));
3886 else if (arg
->mode
!= BLKmode
)
3890 /* Argument is a scalar, not entirely passed in registers.
3891 (If part is passed in registers, arg->partial says how much
3892 and emit_push_insn will take care of putting it there.)
3894 Push it, and if its size is less than the
3895 amount of space allocated to it,
3896 also bump stack pointer by the additional space.
3897 Note that in C the default argument promotions
3898 will prevent such mismatches. */
3900 size
= GET_MODE_SIZE (arg
->mode
);
3901 /* Compute how much space the push instruction will push.
3902 On many machines, pushing a byte will advance the stack
3903 pointer by a halfword. */
3904 #ifdef PUSH_ROUNDING
3905 size
= PUSH_ROUNDING (size
);
3909 /* Compute how much space the argument should get:
3910 round up to a multiple of the alignment for arguments. */
3911 if (none
!= FUNCTION_ARG_PADDING (arg
->mode
, TREE_TYPE (pval
)))
3912 used
= (((size
+ PARM_BOUNDARY
/ BITS_PER_UNIT
- 1)
3913 / (PARM_BOUNDARY
/ BITS_PER_UNIT
))
3914 * (PARM_BOUNDARY
/ BITS_PER_UNIT
));
3916 /* This isn't already where we want it on the stack, so put it there.
3917 This can either be done with push or copy insns. */
3918 emit_push_insn (arg
->value
, arg
->mode
, TREE_TYPE (pval
), NULL_RTX
, 0,
3919 partial
, reg
, used
- size
, argblock
,
3920 ARGS_SIZE_RTX (arg
->offset
), reg_parm_stack_space
,
3921 ARGS_SIZE_RTX (arg
->alignment_pad
));
3926 /* BLKmode, at least partly to be pushed. */
3928 register int excess
;
3931 /* Pushing a nonscalar.
3932 If part is passed in registers, PARTIAL says how much
3933 and emit_push_insn will take care of putting it there. */
3935 /* Round its size up to a multiple
3936 of the allocation unit for arguments. */
3938 if (arg
->size
.var
!= 0)
3941 size_rtx
= ARGS_SIZE_RTX (arg
->size
);
3945 /* PUSH_ROUNDING has no effect on us, because
3946 emit_push_insn for BLKmode is careful to avoid it. */
3947 excess
= (arg
->size
.constant
- int_size_in_bytes (TREE_TYPE (pval
))
3948 + partial
* UNITS_PER_WORD
);
3949 size_rtx
= expr_size (pval
);
3952 emit_push_insn (arg
->value
, arg
->mode
, TREE_TYPE (pval
), size_rtx
,
3953 TYPE_ALIGN (TREE_TYPE (pval
)) / BITS_PER_UNIT
, partial
,
3954 reg
, excess
, argblock
, ARGS_SIZE_RTX (arg
->offset
),
3955 reg_parm_stack_space
,
3956 ARGS_SIZE_RTX (arg
->alignment_pad
));
3960 /* Unless this is a partially-in-register argument, the argument is now
3963 ??? Note that this can change arg->value from arg->stack to
3964 arg->stack_slot and it matters when they are not the same.
3965 It isn't totally clear that this is correct in all cases. */
3967 arg
->value
= arg
->stack_slot
;
3969 /* Once we have pushed something, pops can't safely
3970 be deferred during the rest of the arguments. */
3973 /* ANSI doesn't require a sequence point here,
3974 but PCC has one, so this will avoid some problems. */
3977 /* Free any temporary slots made in processing this argument. Show
3978 that we might have taken the address of something and pushed that
3980 preserve_temp_slots (NULL_RTX
);