allow all arm targets to use -mstructure-size-boundary=XX
[official-gcc.git] / gcc / calls.c
blob3b4b1b209210d8236c5084095ecddbc6914b03a2
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "expr.h"
27 #include "function.h"
28 #include "regs.h"
29 #include "insn-flags.h"
30 #include "toplev.h"
31 #include "output.h"
32 #include "tm_p.h"
34 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
35 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
36 #endif
38 /* Decide whether a function's arguments should be processed
39 from first to last or from last to first.
41 They should if the stack and args grow in opposite directions, but
42 only if we have push insns. */
44 #ifdef PUSH_ROUNDING
46 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
47 #define PUSH_ARGS_REVERSED /* If it's last to first */
48 #endif
50 #endif
52 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
53 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
55 /* Data structure and subroutines used within expand_call. */
/* Bookkeeping for one actual argument while expand_call lays out a call.  */
 57 struct arg_data
 59   /* Tree node for this argument. */
 60   tree tree_value;
 61   /* Mode for value; TYPE_MODE unless promoted. */
 62   enum machine_mode mode;
 63   /* Current RTL value for argument, or 0 if it isn't precomputed. */
 64   rtx value;
 65   /* Initially-computed RTL value for argument; only for const functions. */
 66   rtx initial_value;
 67   /* Register to pass this argument in, 0 if passed on stack, or a
 68      PARALLEL if the arg is to be copied into multiple non-contiguous
 69      registers. */
 70   rtx reg;
 71   /* If REG was promoted from the actual mode of the argument expression,
 72      indicates whether the promotion is sign- or zero-extended. */
 73   int unsignedp;
 74   /* Number of registers to use.  0 means put the whole arg in registers.
 75      Also 0 if not passed in registers. */
 76   int partial;
 77   /* Non-zero if argument must be passed on stack.
 78      Note that some arguments may be passed on the stack
 79      even though pass_on_stack is zero, just because FUNCTION_ARG says so.
 80      pass_on_stack identifies arguments that *cannot* go in registers. */
 81   int pass_on_stack;
 82   /* Offset of this argument from beginning of stack-args. */
 83   struct args_size offset;
 84   /* Similar, but offset to the start of the stack slot.  Different from
 85      OFFSET if this arg pads downward. */
 86   struct args_size slot_offset;
 87   /* Size of this argument on the stack, rounded up for any padding it gets,
 88      parts of the argument passed in registers do not count.
 89      If REG_PARM_STACK_SPACE is defined, then register parms
 90      are counted here as well. */
 91   struct args_size size;
 92   /* Location on the stack at which parameter should be stored.  The store
 93      has already been done if STACK == VALUE. */
 94   rtx stack;
 95   /* Location on the stack of the start of this argument slot.  This can
 96      differ from STACK if this arg pads downward.  This location is known
 97      to be aligned to FUNCTION_ARG_BOUNDARY. */
 98   rtx stack_slot;
 99 #ifdef ACCUMULATE_OUTGOING_ARGS
100   /* Place that this stack area has been saved, if needed. */
101   rtx save_area;
102 #endif
103   /* If an argument's alignment does not permit direct copying into registers,
104      copy in smaller-sized pieces into pseudos.  These are stored in a
105      block pointed to by this field.  The next field says how many
106      word-sized pseudos we made. */
107   rtx *aligned_regs;
108   int n_aligned_regs;
111 #ifdef ACCUMULATE_OUTGOING_ARGS
112 /* A vector of one char per byte of stack space.  A byte is non-zero if
113    the corresponding stack location has been used.
114    This vector is used to prevent a function call within an argument from
115    clobbering any stack already set up.  */
116 static char *stack_usage_map;
118 /* Size of STACK_USAGE_MAP. */
119 static int highest_outgoing_arg_in_use;
121 /* stack_arg_under_construction is nonzero when an argument may be
122    initialized with a constructor call (including a C function that
123    returns a BLKmode struct) and expand_call must take special action
124    to make sure the object being constructed does not overlap the
125    argument list for the constructor call.  */
126 int stack_arg_under_construction;
127 #endif
/* Forward declarations for the static helpers defined below.  */
129 static int calls_function	PROTO ((tree, int));
130 static int calls_function_1	PROTO ((tree, int));
131 static void emit_call_1		PROTO ((rtx, tree, tree, HOST_WIDE_INT,
132 					HOST_WIDE_INT, HOST_WIDE_INT, rtx,
133 					rtx, int, rtx, int));
134 static void special_function_p	PROTO ((char *, tree, int *, int *,
135 					int *, int *));
136 static void precompute_register_parameters	PROTO ((int, struct arg_data *,
137 						int *));
138 static void store_one_arg	PROTO ((struct arg_data *, rtx, int, int,
139 					int));
140 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
141 							   int));
142 static int finalize_must_preallocate		PROTO ((int, int,
143 							struct arg_data *,
144 							struct args_size *));
145 static void precompute_arguments 		PROTO ((int, int, int,
146 							struct arg_data *,
147 							struct args_size *));
148 static int compute_argument_block_size		PROTO ((int, 
149 							struct args_size *));
150 static void initialize_argument_information	PROTO ((int,
151 							struct arg_data *,
152 							struct args_size *,
153 							int, tree, tree,
154 							CUMULATIVE_ARGS *,
155 							int, rtx *, int *,
156 							int *, int *));
157 static void compute_argument_addresses		PROTO ((struct arg_data *,
158 						        rtx, int));
159 static rtx rtx_for_function_call		PROTO ((tree, tree));
160 static void load_register_parameters		PROTO ((struct arg_data *,
161 							int, rtx *));
163 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
164 static rtx save_fixed_argument_area	PROTO ((int, rtx, int *, int *));
165 static void restore_fixed_argument_area	PROTO ((rtx, rtx, int, int));
166 #endif
168 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
169    `alloca'.
171    If WHICH is 0, return 1 if EXP contains a call to any function.
172    Actually, we only need return 1 if evaluating EXP would require pushing
173    arguments on the stack, but that is too difficult to compute, so we just
174    assume any function call might require the stack.  */
/* Scratch list of SAVE_EXPRs already visited by calls_function_1; reset
   around each top-level walk so shared subtrees are scanned only once.  */
176 static tree calls_function_save_exprs;
178 static int
179 calls_function (exp, which)
180 tree exp;
181 int which;
183 int val;
184 calls_function_save_exprs = 0;
185 val = calls_function_1 (exp, which);
186 calls_function_save_exprs = 0;
187 return val;
/* Recursive worker for calls_function; see the comment above
   calls_function_save_exprs for the meaning of WHICH.  */
190 static int
191 calls_function_1 (exp, which)
192      tree exp;
193      int which;
195   register int i;
196   enum tree_code code = TREE_CODE (exp);
197   int type = TREE_CODE_CLASS (code);
198   int length = tree_code_length[(int) code];
200   /* If this code is language-specific, we don't know what it will do.  */
201   if ((int) code >= NUM_TREE_CODES)
202     return 1;
204   /* Only expressions and references can contain calls.  */
205   if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
206       && type != 'b')
207     return 0;
209   switch (code)
211     case CALL_EXPR:
212       if (which == 0)
213 	return 1;
214       else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
215 	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
216 		   == FUNCTION_DECL))
218 	  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  /* Direct call: alloca itself, or a function already known
	     (from its saved insns) to call alloca.  */
220 	  if ((DECL_BUILT_IN (fndecl)
221 	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
222 	       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
223 	      || (DECL_SAVED_INSNS (fndecl)
224 		  && DECL_SAVED_INSNS (fndecl)->calls_alloca))
225 	    return 1;
228       /* Third operand is RTL.  */
229       length = 2;
230       break;
232     case SAVE_EXPR:
233       if (SAVE_EXPR_RTL (exp) != 0)
234 	return 0;
      /* Record visited SAVE_EXPRs so shared subtrees are walked only once.  */
235       if (value_member (exp, calls_function_save_exprs))
236 	return 0;
237       calls_function_save_exprs = tree_cons (NULL_TREE, exp,
238 					     calls_function_save_exprs);
239       return (TREE_OPERAND (exp, 0) != 0
240 	      && calls_function_1 (TREE_OPERAND (exp, 0), which));
242     case BLOCK:
244 	register tree local;
	/* Scan initializers of the block's variables ... */
246 	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
247 	  if (DECL_INITIAL (local) != 0
248 	      && calls_function_1 (DECL_INITIAL (local), which))
249 	    return 1;
252 	register tree subblock;
	/* ... and recurse into nested sub-blocks.  */
254 	for (subblock = BLOCK_SUBBLOCKS (exp);
255 	     subblock;
256 	     subblock = TREE_CHAIN (subblock))
257 	  if (calls_function_1 (subblock, which))
258 	    return 1;
260       return 0;
262     case METHOD_CALL_EXPR:
263       length = 3;
264       break;
266     case WITH_CLEANUP_EXPR:
267       length = 1;
268       break;
270     case RTL_EXPR:
271       return 0;
273     default:
274       break;
  /* Fall through: scan the (possibly adjusted) operand list.  */
277   for (i = 0; i < length; i++)
278     if (TREE_OPERAND (exp, i) != 0
279 	&& calls_function_1 (TREE_OPERAND (exp, i), which))
280       return 1;
282   return 0;
285 /* Force FUNEXP into a form suitable for the address of a CALL,
286    and return that as an rtx.  Also load the static chain register
287    if FNDECL is a nested function.
289    CALL_FUSAGE points to a variable holding the prospective
290    CALL_INSN_FUNCTION_USAGE information.  */
293 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
294      rtx funexp;
295      tree fndecl;
296      rtx *call_fusage;
297      int reg_parm_seen;
299   rtx static_chain_value = 0;
301   funexp = protect_from_queue (funexp, 0);
303   if (fndecl != 0)
304     /* Get possible static chain value for nested function in C.  */
305     static_chain_value = lookup_static_chain (fndecl);
307   /* Make a valid memory address and copy constants thru pseudo-regs,
308      but not for a constant address if -fno-function-cse.  */
309   if (GET_CODE (funexp) != SYMBOL_REF)
310     /* If we are using registers for parameters, force the
311        function address into a register now.  */
312     funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
313 	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
314 	      : memory_address (FUNCTION_MODE, funexp));
315   else
    /* Constant address: allow it to be CSE'd into a register unless
       the target or -fno-function-cse forbids it.  */
317 #ifndef NO_FUNCTION_CSE
318       if (optimize && ! flag_no_function_cse)
319 #ifdef NO_RECURSIVE_FUNCTION_CSE
320 	if (fndecl != current_function_decl)
321 #endif
322 	  funexp = force_reg (Pmode, funexp);
323 #endif
  /* Load the static chain and record its register in CALL_FUSAGE so the
     call insn is known to use it.  */
326   if (static_chain_value != 0)
328       emit_move_insn (static_chain_rtx, static_chain_value);
330       if (GET_CODE (static_chain_rtx) == REG)
331 	use_reg (call_fusage, static_chain_rtx);
334   return funexp;
337 /* Generate instructions to call function FUNEXP,
338    and optionally pop the results.
339    The CALL_INSN is the first insn generated.
341    FNDECL is the declaration node of the function.  This is given to the
342    macro RETURN_POPS_ARGS to determine whether this function pops its own args.
344    FUNTYPE is the data type of the function.  This is given to the macro
345    RETURN_POPS_ARGS to determine whether this function pops its own args.
346    We used to allow an identifier for library functions, but that doesn't
347    work when the return type is an aggregate type and the calling convention
348    says that the pointer to this aggregate is to be popped by the callee.
350    STACK_SIZE is the number of bytes of arguments on the stack,
351    rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
352    This is both to put into the call insn and
353    to generate explicit popping code if necessary.
355    STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
356    It is zero if this call doesn't want a structure value.
358    NEXT_ARG_REG is the rtx that results from executing
359      FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
360    just after all the args have had their registers assigned.
361    This could be whatever you like, but normally it is the first
362    arg-register beyond those used for args in this call,
363    or 0 if all the arg-registers are used in this call.
364    It is passed on to `gen_call' so you can put this info in the call insn.
366    VALREG is a hard register in which a value is returned,
367    or 0 if the call does not return a value.
369    OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
370    the args to this call were processed.
371    We restore `inhibit_defer_pop' to that value.
373    CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
374    denote registers used by the called function.
376    IS_CONST is true if this is a `const' call.  */
378 static void
379 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
380 	     struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
381 	     call_fusage, is_const)
382      rtx funexp;
383      tree fndecl ATTRIBUTE_UNUSED;
384      tree funtype ATTRIBUTE_UNUSED;
385      HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
386      HOST_WIDE_INT rounded_stack_size;
387      HOST_WIDE_INT struct_value_size;
388      rtx next_arg_reg;
389      rtx valreg;
390      int old_inhibit_defer_pop;
391      rtx call_fusage;
392      int is_const;
394   rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
395   rtx struct_value_size_rtx = GEN_INT (struct_value_size);
396   rtx call_insn;
397 #ifndef ACCUMULATE_OUTGOING_ARGS
398   int already_popped = 0;
  /* Number of argument bytes the callee itself pops on return.  */
399   HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
400 #endif
402   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
403      and we don't want to load it into a register as an optimization,
404      because prepare_call_address already did it if it should be done.  */
405   if (GET_CODE (funexp) != SYMBOL_REF)
406     funexp = memory_address (FUNCTION_MODE, funexp);
408 #ifndef ACCUMULATE_OUTGOING_ARGS
409 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
410 /* If the target has "call" or "call_value" insns, then prefer them
411    if no arguments are actually popped.  If the target does not have
412    "call" or "call_value" insns, then we must use the popping versions
413    even if the call has no arguments to pop.  */
414 #if defined (HAVE_call) && defined (HAVE_call_value)
415   if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
416       && n_popped > 0)
417 #else
418   if (HAVE_call_pop && HAVE_call_value_pop)
419 #endif
421       rtx n_pop = GEN_INT (n_popped);
422       rtx pat;
424       /* If this subroutine pops its own args, record that in the call insn
425 	 if possible, for the sake of frame pointer elimination.  */
427       if (valreg)
428 	pat = gen_call_value_pop (valreg,
429 				  gen_rtx_MEM (FUNCTION_MODE, funexp),
430 				  rounded_stack_size_rtx, next_arg_reg, n_pop);
431       else
432 	pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
433 			    rounded_stack_size_rtx, next_arg_reg, n_pop);
435       emit_call_insn (pat);
436       already_popped = 1;
438   else
439 #endif
440 #endif
442 #if defined (HAVE_call) && defined (HAVE_call_value)
443   if (HAVE_call && HAVE_call_value)
445       if (valreg)
446 	emit_call_insn (gen_call_value (valreg,
447 					gen_rtx_MEM (FUNCTION_MODE, funexp),
448 					rounded_stack_size_rtx, next_arg_reg,
449 					NULL_RTX));
450       else
451 	emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
452 				  rounded_stack_size_rtx, next_arg_reg,
453 				  struct_value_size_rtx));
455   else
456 #endif
457     abort ();
459   /* Find the CALL insn we just emitted.  */
460   for (call_insn = get_last_insn ();
461        call_insn && GET_CODE (call_insn) != CALL_INSN;
462        call_insn = PREV_INSN (call_insn))
465   if (! call_insn)
466     abort ();
468   /* Put the register usage information on the CALL.  If there is already
469      some usage information, put ours at the end.  */
470   if (CALL_INSN_FUNCTION_USAGE (call_insn))
472       rtx link;
474       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
475 	   link = XEXP (link, 1))
478       XEXP (link, 1) = call_fusage;
480   else
481     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
483   /* If this is a const call, then set the insn's unchanging bit.  */
484   if (is_const)
485     CONST_CALL_P (call_insn) = 1;
487   /* Restore this now, so that we do defer pops for this call's args
488      if the context of the call as a whole permits.  */
489   inhibit_defer_pop = old_inhibit_defer_pop;
491 #ifndef ACCUMULATE_OUTGOING_ARGS
492   /* If returning from the subroutine does not automatically pop the args,
493      we need an instruction to pop them sooner or later.
494      Perhaps do it now; perhaps just record how much space to pop later.
496      If returning from the subroutine does pop the args, indicate that the
497      stack pointer will be changed.  */
499   if (n_popped > 0)
501       if (!already_popped)
502 	CALL_INSN_FUNCTION_USAGE (call_insn)
503 	  = gen_rtx_EXPR_LIST (VOIDmode,
504 			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
505 			       CALL_INSN_FUNCTION_USAGE (call_insn));
      /* The callee already popped N_POPPED bytes; only the remainder is
	 ours to adjust.  */
506       rounded_stack_size -= n_popped;
507       rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
510   if (rounded_stack_size != 0)
512       if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
513 	pending_stack_adjust += rounded_stack_size;
514       else
515 	adjust_stack (rounded_stack_size_rtx);
517 #endif
520 /* Determine if the function identified by NAME and FNDECL is one with
521    special properties we wish to know about.
523    For example, if the function might return more than one time (setjmp), then
524    set RETURNS_TWICE to a nonzero value.
526    Similarly set IS_LONGJMP for if the function is in the longjmp family.
528    Set IS_MALLOC for any of the standard memory allocation functions which
529    allocate from the heap.
531    Set MAY_BE_ALLOCA for any memory allocation function that might allocate
532    space from the stack such as alloca.  */
534 static void
535 special_function_p (name, fndecl, returns_twice, is_longjmp,
536 		    is_malloc, may_be_alloca)
537      char *name;
538      tree fndecl;
539      int *returns_twice;
540      int *is_longjmp;
541      int *is_malloc;
542      int *may_be_alloca;
544   *returns_twice = 0;
545   *is_longjmp = 0;
546   *is_malloc = 0;
547   *may_be_alloca = 0;
  /* NOTE(review): 17 presumably bounds the longest magic name checked
     below including an `__x' prefix -- confirm before changing.  */
549   if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
550       /* Exclude functions not at the file scope, or not `extern',
551 	 since they are not the magic functions we would otherwise
552 	 think they are.  */
553       && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
555       char *tname = name;
557       /* We assume that alloca will always be called by name.  It
558 	 makes no sense to pass it as a pointer-to-function to
559 	 anything that does not understand its behavior.  */
560       *may_be_alloca
561 	= (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
562 	      && name[0] == 'a'
563 	      && ! strcmp (name, "alloca"))
564 	     || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
565 		 && name[0] == '_'
566 		 && ! strcmp (name, "__builtin_alloca"))));
568       /* Disregard prefix _, __ or __x.  */
569       if (name[0] == '_')
571 	  if (name[1] == '_' && name[2] == 'x')
572 	    tname += 3;
573 	  else if (name[1] == '_')
574 	    tname += 2;
575 	  else
576 	    tname += 1;
579       if (tname[0] == 's')
581 	  *returns_twice
582 	     = ((tname[1] == 'e'
583 		 && (! strcmp (tname, "setjmp")
584 		     || ! strcmp (tname, "setjmp_syscall")))
585 	        || (tname[1] == 'i'
586 		    && ! strcmp (tname, "sigsetjmp"))
587 	        || (tname[1] == 'a'
588 		    && ! strcmp (tname, "savectx")));
589 	  if (tname[1] == 'i'
590 	      && ! strcmp (tname, "siglongjmp"))
591 	    *is_longjmp = 1;
593       else if ((tname[0] == 'q' && tname[1] == 's'
594 		&& ! strcmp (tname, "qsetjmp"))
595 	       || (tname[0] == 'v' && tname[1] == 'f'
596 		   && ! strcmp (tname, "vfork")))
597 	*returns_twice = 1;
599       else if (tname[0] == 'l' && tname[1] == 'o'
600 	       && ! strcmp (tname, "longjmp"))
601 	*is_longjmp = 1;
602       /* XXX should have "malloc" attribute on functions instead
603 	 of recognizing them by name.  */
604       else if (! strcmp (tname, "malloc")
605 	       || ! strcmp (tname, "calloc")
606 	       || ! strcmp (tname, "realloc")
607 	       /* Note use of NAME rather than TNAME here.  These functions
608 		  are only reserved when preceded with __.  */
609 	       || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
610 	       || ! strcmp (name, "__nw") /* mangled __builtin_new */
611 	       || ! strcmp (name, "__builtin_new")
612 	       || ! strcmp (name, "__builtin_vec_new"))
613 	*is_malloc = 1;
617 /* Precompute all register parameters as described by ARGS, storing values
618    into fields within the ARGS array.
620    NUM_ACTUALS indicates the total number elements in the ARGS array.
622    Set REG_PARM_SEEN if we encounter a register parameter.  */
624 static void
625 precompute_register_parameters (num_actuals, args, reg_parm_seen)
626      int num_actuals;
627      struct arg_data *args;
628      int *reg_parm_seen;
630   int i;
632   *reg_parm_seen = 0;
634   for (i = 0; i < num_actuals; i++)
635     if (args[i].reg != 0 && ! args[i].pass_on_stack)
637 	*reg_parm_seen = 1;
	/* Expand the argument's tree into RTL if not already done.  */
639 	if (args[i].value == 0)
641 	    push_temp_slots ();
642 	    args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
643 					 VOIDmode, 0);
644 	    preserve_temp_slots (args[i].value);
645 	    pop_temp_slots ();
647 	    /* ANSI doesn't require a sequence point here,
648 	       but PCC has one, so this will avoid some problems.  */
649 	    emit_queue ();
652 	/* If we are to promote the function arg to a wider mode,
653 	   do it now.  */
655 	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
656 	  args[i].value
657 	    = convert_modes (args[i].mode,
658 			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
659 			     args[i].value, args[i].unsignedp);
661 	/* If the value is expensive, and we are inside an appropriately
662 	   short loop, put the value into a pseudo and then put the pseudo
663 	   into the hard reg.
665 	   For small register classes, also do this if this call uses
666 	   register parameters.  This is to avoid reload conflicts while
667 	   loading the parameters registers.  */
669 	if ((! (GET_CODE (args[i].value) == REG
670 		|| (GET_CODE (args[i].value) == SUBREG
671 		    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
672 	    && args[i].mode != BLKmode
673 	    && rtx_cost (args[i].value, SET) > 2
674 	    && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
675 		|| preserve_subexpressions_p ()))
676 	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
680 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
682 /* The argument list is the property of the called routine and it
683    may clobber it.  If the fixed area has been used for previous
684    parameters, we must save and restore it.  */
685 static rtx
686 save_fixed_argument_area (reg_parm_stack_space, argblock,
687 			  low_to_save, high_to_save)
688      int reg_parm_stack_space;
689      rtx argblock;
690      int *low_to_save;
691      int *high_to_save;
693   int i;
694   rtx save_area = NULL_RTX;
696   /* Compute the boundary of the area that needs to be saved, if any.
     NOTE(review): *low_to_save is expected to be -1 on entry (set by the
     caller) -- confirm against the call site.  */
697 #ifdef ARGS_GROW_DOWNWARD
698   for (i = 0; i < reg_parm_stack_space + 1; i++)
699 #else
700   for (i = 0; i < reg_parm_stack_space; i++)
701 #endif
703       if (i >= highest_outgoing_arg_in_use
704 	  || stack_usage_map[i] == 0)
705 	continue;
707       if (*low_to_save == -1)
708 	*low_to_save = i;
710       *high_to_save = i;
713   if (*low_to_save >= 0)
715       int num_to_save = *high_to_save - *low_to_save + 1;
716       enum machine_mode save_mode
717 	= mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
718       rtx stack_area;
720       /* If we don't have the required alignment, must do this in BLKmode.  */
721       if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
722 				BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
723 	save_mode = BLKmode;
725 #ifdef ARGS_GROW_DOWNWARD
726       stack_area = gen_rtx_MEM (save_mode,
727 				memory_address (save_mode,
728 						plus_constant (argblock,
729 							       - *high_to_save)));
730 #else
731       stack_area = gen_rtx_MEM (save_mode,
732 				memory_address (save_mode,
733 						plus_constant (argblock,
734 							       *low_to_save)));
735 #endif
736       if (save_mode == BLKmode)
738 	  save_area = assign_stack_temp (BLKmode, num_to_save, 0);
739 	  emit_block_move (validize_mem (save_area), stack_area,
740 			   GEN_INT (num_to_save),
741 			   PARM_BOUNDARY / BITS_PER_UNIT);
743       else
745 	  save_area = gen_reg_rtx (save_mode);
746 	  emit_move_insn (save_area, stack_area);
749   return save_area;
/* Restore the fixed argument area previously captured by
   save_fixed_argument_area.  SAVE_AREA is its return value; ARGBLOCK,
   HIGH_TO_SAVE and LOW_TO_SAVE must match the values used at save time.  */
752 static void
753 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
754      rtx save_area;
755      rtx argblock;
756      int high_to_save;
757      int low_to_save;
759   enum machine_mode save_mode = GET_MODE (save_area);
760 #ifdef ARGS_GROW_DOWNWARD
761   rtx stack_area
762     = gen_rtx_MEM (save_mode,
763 		   memory_address (save_mode,
764 				   plus_constant (argblock,
765 						  - high_to_save)));
766 #else
767   rtx stack_area
768     = gen_rtx_MEM (save_mode,
769 		   memory_address (save_mode,
770 				   plus_constant (argblock,
771 						  low_to_save)));
772 #endif
774   if (save_mode != BLKmode)
775     emit_move_insn (stack_area, save_area);
776   else
777     emit_block_move (stack_area, validize_mem (save_area),
778 		     GEN_INT (high_to_save - low_to_save + 1),
779 		     PARM_BOUNDARY / BITS_PER_UNIT);
781 #endif
783 /* If any elements in ARGS refer to parameters that are to be passed in
784    registers, but not in memory, and whose alignment does not permit a
785    direct copy into registers.  Copy the values into a group of pseudos
786    which we will later copy into the appropriate hard registers.
788    Pseudos for each unaligned argument will be stored into the array
789    args[argnum].aligned_regs.  The caller is responsible for deallocating
790    the aligned_regs array if it is nonzero.  */
792 static void
793 store_unaligned_arguments_into_pseudos (args, num_actuals)
794      struct arg_data *args;
795      int num_actuals;
797   int i, j;
799   for (i = 0; i < num_actuals; i++)
800     if (args[i].reg != 0 && ! args[i].pass_on_stack
801 	&& args[i].mode == BLKmode
802 	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
803 	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
805 	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
806 	int big_endian_correction = 0;
	/* One pseudo per word, unless only part goes in registers.  */
808 	args[i].n_aligned_regs
809 	  = args[i].partial ? args[i].partial
810 	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
812 	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
813 						* args[i].n_aligned_regs);
815 	/* Structures smaller than a word are aligned to the least
816 	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
817 	   this means we must skip the empty high order bytes when
818 	   calculating the bit offset.  */
819 	if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
820 	  big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
822 	for (j = 0; j < args[i].n_aligned_regs; j++)
824 	    rtx reg = gen_reg_rtx (word_mode);
825 	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
826 	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
827 	    int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
829 	    args[i].aligned_regs[j] = reg;
831 	    /* There is no need to restrict this code to loading items
832 	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
833 	       load up entire word sized registers efficiently.
835 	       ??? This may not be needed anymore.
836 	       We used to emit a clobber here but that doesn't let later
837 	       passes optimize the instructions we emit.  By storing 0 into
838 	       the register later passes know the first AND to zero out the
839 	       bitfield being set in the register is unnecessary.  The store
840 	       of 0 will be deleted as will at least the first AND.  */
842 	    emit_move_insn (reg, const0_rtx);
844 	    bytes -= bitsize / BITS_PER_UNIT;
845 	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
846 			     extract_bit_field (word, bitsize, 0, 1,
847 						NULL_RTX, word_mode,
848 						word_mode,
849 						bitalign / BITS_PER_UNIT,
850 						BITS_PER_WORD),
851 			     bitalign / BITS_PER_UNIT, BITS_PER_WORD);
856 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
857 ACTPARMS.
859 NUM_ACTUALS is the total number of parameters.
861 N_NAMED_ARGS is the total number of named arguments.
863 FNDECL is the tree code for the target of this call (if known)
865 ARGS_SO_FAR holds state needed by the target to know where to place
866 the next argument.
868 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
869 for arguments which are passed in registers.
871 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
872 and may be modified by this routine.
874 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
875 flags which may may be modified by this routine. */
877 static void
878 initialize_argument_information (num_actuals, args, args_size, n_named_args,
879 actparms, fndecl, args_so_far,
880 reg_parm_stack_space, old_stack_level,
881 old_pending_adj, must_preallocate, is_const)
882 int num_actuals ATTRIBUTE_UNUSED;
883 struct arg_data *args;
884 struct args_size *args_size;
885 int n_named_args ATTRIBUTE_UNUSED;
886 tree actparms;
887 tree fndecl;
888 CUMULATIVE_ARGS *args_so_far;
889 int reg_parm_stack_space;
890 rtx *old_stack_level;
891 int *old_pending_adj;
892 int *must_preallocate;
893 int *is_const;
895 /* 1 if scanning parms front to back, -1 if scanning back to front. */
896 int inc;
898 /* Count arg position in order args appear. */
899 int argpos;
901 int i;
902 tree p;
904 args_size->constant = 0;
905 args_size->var = 0;
907 /* In this loop, we consider args in the order they are written.
908 We fill up ARGS from the front or from the back if necessary
909 so that in any case the first arg to be pushed ends up at the front. */
911 #ifdef PUSH_ARGS_REVERSED
912 i = num_actuals - 1, inc = -1;
913 /* In this case, must reverse order of args
914 so that we compute and push the last arg first. */
915 #else
916 i = 0, inc = 1;
917 #endif
919 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
920 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
922 tree type = TREE_TYPE (TREE_VALUE (p));
923 int unsignedp;
924 enum machine_mode mode;
926 args[i].tree_value = TREE_VALUE (p);
928 /* Replace erroneous argument with constant zero. */
929 if (type == error_mark_node || TYPE_SIZE (type) == 0)
930 args[i].tree_value = integer_zero_node, type = integer_type_node;
932 /* If TYPE is a transparent union, pass things the way we would
933 pass the first field of the union. We have already verified that
934 the modes are the same. */
935 if (TYPE_TRANSPARENT_UNION (type))
936 type = TREE_TYPE (TYPE_FIELDS (type));
938 /* Decide where to pass this arg.
940 args[i].reg is nonzero if all or part is passed in registers.
942 args[i].partial is nonzero if part but not all is passed in registers,
943 and the exact value says how many words are passed in registers.
945 args[i].pass_on_stack is nonzero if the argument must at least be
946 computed on the stack. It may then be loaded back into registers
947 if args[i].reg is nonzero.
949 These decisions are driven by the FUNCTION_... macros and must agree
950 with those made by function.c. */
952 /* See if this argument should be passed by invisible reference. */
953 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
954 && contains_placeholder_p (TYPE_SIZE (type)))
955 || TREE_ADDRESSABLE (type)
956 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
957 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
958 type, argpos < n_named_args)
959 #endif
962 /* If we're compiling a thunk, pass through invisible
963 references instead of making a copy. */
964 if (current_function_is_thunk
965 #ifdef FUNCTION_ARG_CALLEE_COPIES
966 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
967 type, argpos < n_named_args)
968 /* If it's in a register, we must make a copy of it too. */
969 /* ??? Is this a sufficient test? Is there a better one? */
970 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
971 && REG_P (DECL_RTL (args[i].tree_value)))
972 && ! TREE_ADDRESSABLE (type))
973 #endif
976 /* C++ uses a TARGET_EXPR to indicate that we want to make a
977 new object from the argument. If we are passing by
978 invisible reference, the callee will do that for us, so we
979 can strip off the TARGET_EXPR. This is not always safe,
980 but it is safe in the only case where this is a useful
981 optimization; namely, when the argument is a plain object.
982 In that case, the frontend is just asking the backend to
983 make a bitwise copy of the argument. */
985 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
986 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
987 (args[i].tree_value, 1)))
988 == 'd')
989 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
990 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
992 args[i].tree_value = build1 (ADDR_EXPR,
993 build_pointer_type (type),
994 args[i].tree_value);
995 type = build_pointer_type (type);
997 else
999 /* We make a copy of the object and pass the address to the
1000 function being called. */
1001 rtx copy;
1003 if (TYPE_SIZE (type) == 0
1004 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1005 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1006 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1007 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1008 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1010 /* This is a variable-sized object. Make space on the stack
1011 for it. */
1012 rtx size_rtx = expr_size (TREE_VALUE (p));
1014 if (*old_stack_level == 0)
1016 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1017 *old_pending_adj = pending_stack_adjust;
1018 pending_stack_adjust = 0;
1021 copy = gen_rtx_MEM (BLKmode,
1022 allocate_dynamic_stack_space (size_rtx,
1023 NULL_RTX,
1024 TYPE_ALIGN (type)));
1026 else
1028 int size = int_size_in_bytes (type);
1029 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1032 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1034 store_expr (args[i].tree_value, copy, 0);
1035 *is_const = 0;
1037 args[i].tree_value = build1 (ADDR_EXPR,
1038 build_pointer_type (type),
1039 make_tree (type, copy));
1040 type = build_pointer_type (type);
1044 mode = TYPE_MODE (type);
1045 unsignedp = TREE_UNSIGNED (type);
1047 #ifdef PROMOTE_FUNCTION_ARGS
1048 mode = promote_mode (type, mode, &unsignedp, 1);
1049 #endif
1051 args[i].unsignedp = unsignedp;
1052 args[i].mode = mode;
1053 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1054 argpos < n_named_args);
1055 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1056 if (args[i].reg)
1057 args[i].partial
1058 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1059 argpos < n_named_args);
1060 #endif
1062 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1064 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1065 it means that we are to pass this arg in the register(s) designated
1066 by the PARALLEL, but also to pass it in the stack. */
1067 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1068 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1069 args[i].pass_on_stack = 1;
1071 /* If this is an addressable type, we must preallocate the stack
1072 since we must evaluate the object into its final location.
1074 If this is to be passed in both registers and the stack, it is simpler
1075 to preallocate. */
1076 if (TREE_ADDRESSABLE (type)
1077 || (args[i].pass_on_stack && args[i].reg != 0))
1078 *must_preallocate = 1;
1080 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1081 we cannot consider this function call constant. */
1082 if (TREE_ADDRESSABLE (type))
1083 *is_const = 0;
1085 /* Compute the stack-size of this argument. */
1086 if (args[i].reg == 0 || args[i].partial != 0
1087 || reg_parm_stack_space > 0
1088 || args[i].pass_on_stack)
1089 locate_and_pad_parm (mode, type,
1090 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1092 #else
1093 args[i].reg != 0,
1094 #endif
1095 fndecl, args_size, &args[i].offset,
1096 &args[i].size);
1098 #ifndef ARGS_GROW_DOWNWARD
1099 args[i].slot_offset = *args_size;
1100 #endif
1102 /* If a part of the arg was put into registers,
1103 don't include that part in the amount pushed. */
1104 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1105 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1106 / (PARM_BOUNDARY / BITS_PER_UNIT)
1107 * (PARM_BOUNDARY / BITS_PER_UNIT));
1109 /* Update ARGS_SIZE, the total stack space for args so far. */
1111 args_size->constant += args[i].size.constant;
1112 if (args[i].size.var)
1114 ADD_PARM_SIZE (*args_size, args[i].size.var);
1117 /* Since the slot offset points to the bottom of the slot,
1118 we must record it after incrementing if the args grow down. */
1119 #ifdef ARGS_GROW_DOWNWARD
1120 args[i].slot_offset = *args_size;
1122 args[i].slot_offset.constant = -args_size->constant;
1123 if (args_size->var)
1125 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1127 #endif
1129 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1130 have been used, etc. */
1132 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1133 argpos < n_named_args);
1137 /* Update ARGS_SIZE to contain the total size for the argument block.
1138 Return the original constant component of the argument block's size.
1140 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1141 for arguments passed in registers. */
1143 static int
1144 compute_argument_block_size (reg_parm_stack_space, args_size)
1145 int reg_parm_stack_space;
1146 struct args_size *args_size;
1148 int unadjusted_args_size = args_size->constant;
1150 /* Compute the actual size of the argument block required. The variable
1151 and constant sizes must be combined, the size may have to be rounded,
1152 and there may be a minimum required size. */
1154 if (args_size->var)
1156 args_size->var = ARGS_SIZE_TREE (*args_size);
1157 args_size->constant = 0;
1159 #ifdef PREFERRED_STACK_BOUNDARY
1160 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1161 args_size->var = round_up (args_size->var, STACK_BYTES);
1162 #endif
1164 if (reg_parm_stack_space > 0)
1166 args_size->var
1167 = size_binop (MAX_EXPR, args_size->var,
1168 size_int (reg_parm_stack_space));
1170 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1171 /* The area corresponding to register parameters is not to count in
1172 the size of the block we need. So make the adjustment. */
1173 args_size->var
1174 = size_binop (MINUS_EXPR, args_size->var,
1175 size_int (reg_parm_stack_space));
1176 #endif
1179 else
1181 #ifdef PREFERRED_STACK_BOUNDARY
1182 args_size->constant = (((args_size->constant
1183 + pending_stack_adjust
1184 + STACK_BYTES - 1)
1185 / STACK_BYTES * STACK_BYTES)
1186 - pending_stack_adjust);
1187 #endif
1189 args_size->constant = MAX (args_size->constant,
1190 reg_parm_stack_space);
1192 #ifdef MAYBE_REG_PARM_STACK_SPACE
1193 if (reg_parm_stack_space == 0)
1194 args_size->constant = 0;
1195 #endif
1197 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1198 args_size->constant -= reg_parm_stack_space;
1199 #endif
1201 return unadjusted_args_size;
1204 /* Precompute parameters as needed for a function call.
1206 IS_CONST indicates the target function is a pure function.
1208 MUST_PREALLOCATE indicates that we must preallocate stack space for
1209 any stack arguments.
1211 NUM_ACTUALS is the number of arguments.
1213 ARGS is an array containing information for each argument; this routine
1214 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1216 ARGS_SIZE contains information about the size of the arg list. */
1218 static void
1219 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1220 int is_const;
1221 int must_preallocate;
1222 int num_actuals;
1223 struct arg_data *args;
1224 struct args_size *args_size;
1226 int i;
1228 /* If this function call is cse'able, precompute all the parameters.
1229 Note that if the parameter is constructed into a temporary, this will
1230 cause an additional copy because the parameter will be constructed
1231 into a temporary location and then copied into the outgoing arguments.
1232 If a parameter contains a call to alloca and this function uses the
1233 stack, precompute the parameter. */
1235 /* If we preallocated the stack space, and some arguments must be passed
1236 on the stack, then we must precompute any parameter which contains a
1237 function call which will store arguments on the stack.
1238 Otherwise, evaluating the parameter may clobber previous parameters
1239 which have already been stored into the stack. */
1241 for (i = 0; i < num_actuals; i++)
1242 if (is_const
1243 || ((args_size->var != 0 || args_size->constant != 0)
1244 && calls_function (args[i].tree_value, 1))
1245 || (must_preallocate
1246 && (args_size->var != 0 || args_size->constant != 0)
1247 && calls_function (args[i].tree_value, 0)))
1249 /* If this is an addressable type, we cannot pre-evaluate it. */
1250 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1251 abort ();
1253 push_temp_slots ();
1255 args[i].initial_value = args[i].value
1256 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1258 preserve_temp_slots (args[i].value);
1259 pop_temp_slots ();
1261 /* ANSI doesn't require a sequence point here,
1262 but PCC has one, so this will avoid some problems. */
1263 emit_queue ();
1265 args[i].initial_value = args[i].value
1266 = protect_from_queue (args[i].initial_value, 0);
1268 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1269 args[i].value
1270 = convert_modes (args[i].mode,
1271 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1272 args[i].value, args[i].unsignedp);
1276 /* Given the current state of MUST_PREALLOCATE and information about
1277 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1278 compute and return the final value for MUST_PREALLOCATE. */
1280 static int
1281 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1282 int must_preallocate;
1283 int num_actuals;
1284 struct arg_data *args;
1285 struct args_size *args_size;
1287 /* See if we have or want to preallocate stack space.
1289 If we would have to push a partially-in-regs parm
1290 before other stack parms, preallocate stack space instead.
1292 If the size of some parm is not a multiple of the required stack
1293 alignment, we must preallocate.
1295 If the total size of arguments that would otherwise create a copy in
1296 a temporary (such as a CALL) is more than half the total argument list
1297 size, preallocation is faster.
1299 Another reason to preallocate is if we have a machine (like the m88k)
1300 where stack alignment is required to be maintained between every
1301 pair of insns, not just when the call is made. However, we assume here
1302 that such machines either do not have push insns (and hence preallocation
1303 would occur anyway) or the problem is taken care of with
1304 PUSH_ROUNDING. */
1306 if (! must_preallocate)
1308 int partial_seen = 0;
1309 int copy_to_evaluate_size = 0;
1310 int i;
1312 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1314 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1315 partial_seen = 1;
1316 else if (partial_seen && args[i].reg == 0)
1317 must_preallocate = 1;
1319 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1320 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1321 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1322 || TREE_CODE (args[i].tree_value) == COND_EXPR
1323 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1324 copy_to_evaluate_size
1325 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1328 if (copy_to_evaluate_size * 2 >= args_size->constant
1329 && args_size->constant > 0)
1330 must_preallocate = 1;
1332 return must_preallocate;
1335 /* If we preallocated stack space, compute the address of each argument
1336 and store it into the ARGS array.
1338 We need not ensure it is a valid memory address here; it will be
1339 validized when it is used.
1341 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1343 static void
1344 compute_argument_addresses (args, argblock, num_actuals)
1345 struct arg_data *args;
1346 rtx argblock;
1347 int num_actuals;
1349 if (argblock)
1351 rtx arg_reg = argblock;
1352 int i, arg_offset = 0;
1354 if (GET_CODE (argblock) == PLUS)
1355 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1357 for (i = 0; i < num_actuals; i++)
1359 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1360 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1361 rtx addr;
1363 /* Skip this parm if it will not be passed on the stack. */
1364 if (! args[i].pass_on_stack && args[i].reg != 0)
1365 continue;
1367 if (GET_CODE (offset) == CONST_INT)
1368 addr = plus_constant (arg_reg, INTVAL (offset));
1369 else
1370 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1372 addr = plus_constant (addr, arg_offset);
1373 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1374 MEM_SET_IN_STRUCT_P
1375 (args[i].stack,
1376 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1378 if (GET_CODE (slot_offset) == CONST_INT)
1379 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1380 else
1381 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1383 addr = plus_constant (addr, arg_offset);
1384 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1389 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1390 in a call instruction.
1392 FNDECL is the tree node for the target function. For an indirect call
1393 FNDECL will be NULL_TREE.
1395 EXP is the CALL_EXPR for this call. */
1397 static rtx
1398 rtx_for_function_call (fndecl, exp)
1399 tree fndecl;
1400 tree exp;
1402 rtx funexp;
1404 /* Get the function to call, in the form of RTL. */
1405 if (fndecl)
1407 /* If this is the first use of the function, see if we need to
1408 make an external definition for it. */
1409 if (! TREE_USED (fndecl))
1411 assemble_external (fndecl);
1412 TREE_USED (fndecl) = 1;
1415 /* Get a SYMBOL_REF rtx for the function address. */
1416 funexp = XEXP (DECL_RTL (fndecl), 0);
1418 else
1419 /* Generate an rtx (probably a pseudo-register) for the address. */
1421 rtx funaddr;
1422 push_temp_slots ();
1423 funaddr = funexp =
1424 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1425 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1427 /* Check the function is executable. */
1428 if (current_function_check_memory_usage)
1430 #ifdef POINTERS_EXTEND_UNSIGNED
1431 /* It might be OK to convert funexp in place, but there's
1432 a lot going on between here and when it happens naturally
1433 that this seems safer. */
1434 funaddr = convert_memory_address (Pmode, funexp);
1435 #endif
1436 emit_library_call (chkr_check_exec_libfunc, 1,
1437 VOIDmode, 1,
1438 funaddr, Pmode);
1440 emit_queue ();
1442 return funexp;
1445 /* Do the register loads required for any wholly-register parms or any
1446 parms which are passed both on the stack and in a register. Their
1447 expressions were already evaluated.
1449 Mark all register-parms as living through the call, putting these USE
1450 insns in the CALL_INSN_FUNCTION_USAGE field. */
1452 static void
1453 load_register_parameters (args, num_actuals, call_fusage)
1454 struct arg_data *args;
1455 int num_actuals;
1456 rtx *call_fusage;
1458 int i, j;
1460 #ifdef LOAD_ARGS_REVERSED
1461 for (i = num_actuals - 1; i >= 0; i--)
1462 #else
1463 for (i = 0; i < num_actuals; i++)
1464 #endif
1466 rtx reg = args[i].reg;
1467 int partial = args[i].partial;
1468 int nregs;
1470 if (reg)
1472 /* Set to non-negative if must move a word at a time, even if just
1473 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1474 we just use a normal move insn. This value can be zero if the
1475 argument is a zero size structure with no fields. */
1476 nregs = (partial ? partial
1477 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1478 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1479 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1480 : -1));
1482 /* Handle calls that pass values in multiple non-contiguous
1483 locations. The Irix 6 ABI has examples of this. */
1485 if (GET_CODE (reg) == PARALLEL)
1487 emit_group_load (reg, args[i].value,
1488 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1489 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1490 / BITS_PER_UNIT));
1493 /* If simple case, just do move. If normal partial, store_one_arg
1494 has already loaded the register for us. In all other cases,
1495 load the register(s) from memory. */
1497 else if (nregs == -1)
1498 emit_move_insn (reg, args[i].value);
1500 /* If we have pre-computed the values to put in the registers in
1501 the case of non-aligned structures, copy them in now. */
1503 else if (args[i].n_aligned_regs != 0)
1504 for (j = 0; j < args[i].n_aligned_regs; j++)
1505 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1506 args[i].aligned_regs[j]);
1508 else if (partial == 0 || args[i].pass_on_stack)
1509 move_block_to_reg (REGNO (reg),
1510 validize_mem (args[i].value), nregs,
1511 args[i].mode);
1513 /* Handle calls that pass values in multiple non-contiguous
1514 locations. The Irix 6 ABI has examples of this. */
1515 if (GET_CODE (reg) == PARALLEL)
1516 use_group_regs (call_fusage, reg);
1517 else if (nregs == -1)
1518 use_reg (call_fusage, reg);
1519 else
1520 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1525 /* Generate all the code for a function call
1526 and return an rtx for its value.
1527 Store the value in TARGET (specified as an rtx) if convenient.
1528 If the value is stored in TARGET then TARGET is returned.
1529 If IGNORE is nonzero, then we ignore the value of the function call. */
1532 expand_call (exp, target, ignore)
1533 tree exp;
1534 rtx target;
1535 int ignore;
1537 /* List of actual parameters. */
1538 tree actparms = TREE_OPERAND (exp, 1);
1539 /* RTX for the function to be called. */
1540 rtx funexp;
1541 /* Data type of the function. */
1542 tree funtype;
1543 /* Declaration of the function being called,
1544 or 0 if the function is computed (not known by name). */
1545 tree fndecl = 0;
1546 char *name = 0;
1548 /* Register in which non-BLKmode value will be returned,
1549 or 0 if no value or if value is BLKmode. */
1550 rtx valreg;
1551 /* Address where we should return a BLKmode value;
1552 0 if value not BLKmode. */
1553 rtx structure_value_addr = 0;
1554 /* Nonzero if that address is being passed by treating it as
1555 an extra, implicit first parameter. Otherwise,
1556 it is passed by being copied directly into struct_value_rtx. */
1557 int structure_value_addr_parm = 0;
1558 /* Size of aggregate value wanted, or zero if none wanted
1559 or if we are using the non-reentrant PCC calling convention
1560 or expecting the value in registers. */
1561 HOST_WIDE_INT struct_value_size = 0;
1562 /* Nonzero if called function returns an aggregate in memory PCC style,
1563 by returning the address of where to find it. */
1564 int pcc_struct_value = 0;
1566 /* Number of actual parameters in this call, including struct value addr. */
1567 int num_actuals;
1568 /* Number of named args. Args after this are anonymous ones
1569 and they must all go on the stack. */
1570 int n_named_args;
1572 /* Vector of information about each argument.
1573 Arguments are numbered in the order they will be pushed,
1574 not the order they are written. */
1575 struct arg_data *args;
1577 /* Total size in bytes of all the stack-parms scanned so far. */
1578 struct args_size args_size;
1579 /* Size of arguments before any adjustments (such as rounding). */
1580 int unadjusted_args_size;
1581 /* Data on reg parms scanned so far. */
1582 CUMULATIVE_ARGS args_so_far;
1583 /* Nonzero if a reg parm has been scanned. */
1584 int reg_parm_seen;
1585 /* Nonzero if this is an indirect function call. */
1587 /* Nonzero if we must avoid push-insns in the args for this call.
1588 If stack space is allocated for register parameters, but not by the
1589 caller, then it is preallocated in the fixed part of the stack frame.
1590 So the entire argument block must then be preallocated (i.e., we
1591 ignore PUSH_ROUNDING in that case). */
1593 #ifdef PUSH_ROUNDING
1594 int must_preallocate = 0;
1595 #else
1596 int must_preallocate = 1;
1597 #endif
1599 /* Size of the stack reserved for parameter registers. */
1600 int reg_parm_stack_space = 0;
1602 /* Address of space preallocated for stack parms
1603 (on machines that lack push insns), or 0 if space not preallocated. */
1604 rtx argblock = 0;
1606 /* Nonzero if it is plausible that this is a call to alloca. */
1607 int may_be_alloca;
1608 /* Nonzero if this is a call to malloc or a related function. */
1609 int is_malloc;
1610 /* Nonzero if this is a call to setjmp or a related function. */
1611 int returns_twice;
1612 /* Nonzero if this is a call to `longjmp'. */
1613 int is_longjmp;
1614 /* Nonzero if this is a call to an inline function. */
1615 int is_integrable = 0;
1616 /* Nonzero if this is a call to a `const' function.
1617 Note that only explicitly named functions are handled as `const' here. */
1618 int is_const = 0;
1619 /* Nonzero if this is a call to a `volatile' function. */
1620 int is_volatile = 0;
1621 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1622 /* Define the boundary of the register parm stack space that needs to be
1623 save, if any. */
1624 int low_to_save = -1, high_to_save;
1625 rtx save_area = 0; /* Place that it is saved */
1626 #endif
1628 #ifdef ACCUMULATE_OUTGOING_ARGS
1629 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1630 char *initial_stack_usage_map = stack_usage_map;
1631 int old_stack_arg_under_construction;
1632 #endif
1634 rtx old_stack_level = 0;
1635 int old_pending_adj = 0;
1636 int old_inhibit_defer_pop = inhibit_defer_pop;
1637 rtx call_fusage = 0;
1638 register tree p;
1639 register int i;
1641 /* The value of the function call can be put in a hard register. But
1642 if -fcheck-memory-usage, code which invokes functions (and thus
1643 damages some hard registers) can be inserted before using the value.
1644 So, target is always a pseudo-register in that case. */
1645 if (current_function_check_memory_usage)
1646 target = 0;
1648 /* See if we can find a DECL-node for the actual function.
1649 As a result, decide whether this is a call to an integrable function. */
1651 p = TREE_OPERAND (exp, 0);
1652 if (TREE_CODE (p) == ADDR_EXPR)
1654 fndecl = TREE_OPERAND (p, 0);
1655 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1656 fndecl = 0;
1657 else
1659 if (!flag_no_inline
1660 && fndecl != current_function_decl
1661 && DECL_INLINE (fndecl)
1662 && DECL_SAVED_INSNS (fndecl)
1663 && DECL_SAVED_INSNS (fndecl)->inlinable)
1664 is_integrable = 1;
1665 else if (! TREE_ADDRESSABLE (fndecl))
1667 /* In case this function later becomes inlinable,
1668 record that there was already a non-inline call to it.
1670 Use abstraction instead of setting TREE_ADDRESSABLE
1671 directly. */
1672 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1673 && optimize > 0)
1675 warning_with_decl (fndecl, "can't inline call to `%s'");
1676 warning ("called from here");
1678 mark_addressable (fndecl);
1681 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1682 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1683 is_const = 1;
1685 if (TREE_THIS_VOLATILE (fndecl))
1686 is_volatile = 1;
1690 /* If we don't have specific function to call, see if we have a
1691 constant or `noreturn' function from the type. */
1692 if (fndecl == 0)
1694 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1695 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1698 #ifdef REG_PARM_STACK_SPACE
1699 #ifdef MAYBE_REG_PARM_STACK_SPACE
1700 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1701 #else
1702 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1703 #endif
1704 #endif
1706 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1707 if (reg_parm_stack_space > 0)
1708 must_preallocate = 1;
1709 #endif
1711 /* Warn if this value is an aggregate type,
1712 regardless of which calling convention we are using for it. */
1713 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1714 warning ("function call has aggregate value");
1716 /* Set up a place to return a structure. */
1718 /* Cater to broken compilers. */
1719 if (aggregate_value_p (exp))
1721 /* This call returns a big structure. */
1722 is_const = 0;
1724 #ifdef PCC_STATIC_STRUCT_RETURN
1726 pcc_struct_value = 1;
1727 /* Easier than making that case work right. */
1728 if (is_integrable)
1730 /* In case this is a static function, note that it has been
1731 used. */
1732 if (! TREE_ADDRESSABLE (fndecl))
1733 mark_addressable (fndecl);
1734 is_integrable = 0;
1737 #else /* not PCC_STATIC_STRUCT_RETURN */
1739 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1741 if (target && GET_CODE (target) == MEM)
1742 structure_value_addr = XEXP (target, 0);
1743 else
1745 /* Assign a temporary to hold the value. */
1746 tree d;
1748 /* For variable-sized objects, we must be called with a target
1749 specified. If we were to allocate space on the stack here,
1750 we would have no way of knowing when to free it. */
1752 if (struct_value_size < 0)
1753 abort ();
1755 /* This DECL is just something to feed to mark_addressable;
1756 it doesn't get pushed. */
1757 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1758 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1759 mark_addressable (d);
1760 structure_value_addr = XEXP (DECL_RTL (d), 0);
1761 TREE_USED (d) = 1;
1762 target = 0;
1765 #endif /* not PCC_STATIC_STRUCT_RETURN */
1768 /* If called function is inline, try to integrate it. */
1770 if (is_integrable)
1772 rtx temp;
1773 #ifdef ACCUMULATE_OUTGOING_ARGS
1774 rtx before_call = get_last_insn ();
1775 #endif
1777 temp = expand_inline_function (fndecl, actparms, target,
1778 ignore, TREE_TYPE (exp),
1779 structure_value_addr);
1781 /* If inlining succeeded, return. */
1782 if (temp != (rtx) (HOST_WIDE_INT) -1)
1784 #ifdef ACCUMULATE_OUTGOING_ARGS
1785 /* If the outgoing argument list must be preserved, push
1786 the stack before executing the inlined function if it
1787 makes any calls. */
1789 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1790 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1791 break;
1793 if (stack_arg_under_construction || i >= 0)
1795 rtx first_insn
1796 = before_call ? NEXT_INSN (before_call) : get_insns ();
1797 rtx insn = NULL_RTX, seq;
1799 /* Look for a call in the inline function code.
1800 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1801 nonzero then there is a call and it is not necessary
1802 to scan the insns. */
1804 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1805 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1806 if (GET_CODE (insn) == CALL_INSN)
1807 break;
1809 if (insn)
1811 /* Reserve enough stack space so that the largest
1812 argument list of any function call in the inline
1813 function does not overlap the argument list being
1814 evaluated. This is usually an overestimate because
1815 allocate_dynamic_stack_space reserves space for an
1816 outgoing argument list in addition to the requested
1817 space, but there is no way to ask for stack space such
1818 that an argument list of a certain length can be
1819 safely constructed.
1821 Add the stack space reserved for register arguments, if
1822 any, in the inline function. What is really needed is the
1823 largest value of reg_parm_stack_space in the inline
1824 function, but that is not available. Using the current
1825 value of reg_parm_stack_space is wrong, but gives
1826 correct results on all supported machines. */
1828 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1829 + reg_parm_stack_space);
1831 start_sequence ();
1832 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1833 allocate_dynamic_stack_space (GEN_INT (adjust),
1834 NULL_RTX, BITS_PER_UNIT);
1835 seq = get_insns ();
1836 end_sequence ();
1837 emit_insns_before (seq, first_insn);
1838 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1841 #endif
1843 /* If the result is equivalent to TARGET, return TARGET to simplify
1844 checks in store_expr. They can be equivalent but not equal in the
1845 case of a function that returns BLKmode. */
1846 if (temp != target && rtx_equal_p (temp, target))
1847 return target;
1848 return temp;
1851 /* If inlining failed, mark FNDECL as needing to be compiled
1852 separately after all. If function was declared inline,
1853 give a warning. */
1854 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1855 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1857 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1858 warning ("called from here");
1860 mark_addressable (fndecl);
1863 function_call_count++;
1865 if (fndecl && DECL_NAME (fndecl))
1866 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1868 /* See if this is a call to a function that can return more than once
1869 or a call to longjmp or malloc. */
1870 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1871 &is_malloc, &may_be_alloca);
1873 if (may_be_alloca)
1874 current_function_calls_alloca = 1;
1876 /* Operand 0 is a pointer-to-function; get the type of the function. */
1877 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1878 if (! POINTER_TYPE_P (funtype))
1879 abort ();
1880 funtype = TREE_TYPE (funtype);
1882 /* When calling a const function, we must pop the stack args right away,
1883 so that the pop is deleted or moved with the call. */
1884 if (is_const)
1885 NO_DEFER_POP;
1887 /* Don't let pending stack adjusts add up to too much.
1888 Also, do all pending adjustments now
1889 if there is any chance this might be a call to alloca. */
1891 if (pending_stack_adjust >= 32
1892 || (pending_stack_adjust > 0 && may_be_alloca))
1893 do_pending_stack_adjust ();
1895 /* Push the temporary stack slot level so that we can free any temporaries
1896 we make. */
1897 push_temp_slots ();
1899 /* Start updating where the next arg would go.
1901 On some machines (such as the PA) indirect calls have a different
1902 calling convention than normal calls. The last argument in
1903 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1904 or not. */
1905 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1907 /* If struct_value_rtx is 0, it means pass the address
1908 as if it were an extra parameter. */
1909 if (structure_value_addr && struct_value_rtx == 0)
1911 /* If structure_value_addr is a REG other than
1912 virtual_outgoing_args_rtx, we can use always use it. If it
1913 is not a REG, we must always copy it into a register.
1914 If it is virtual_outgoing_args_rtx, we must copy it to another
1915 register in some cases. */
1916 rtx temp = (GET_CODE (structure_value_addr) != REG
1917 #ifdef ACCUMULATE_OUTGOING_ARGS
1918 || (stack_arg_under_construction
1919 && structure_value_addr == virtual_outgoing_args_rtx)
1920 #endif
1921 ? copy_addr_to_reg (structure_value_addr)
1922 : structure_value_addr);
1924 actparms
1925 = tree_cons (error_mark_node,
1926 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1927 temp),
1928 actparms);
1929 structure_value_addr_parm = 1;
1932 /* Count the arguments and set NUM_ACTUALS. */
1933 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1934 num_actuals = i;
1936 /* Compute number of named args.
1937 Normally, don't include the last named arg if anonymous args follow.
1938 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1939 (If no anonymous args follow, the result of list_length is actually
1940 one too large. This is harmless.)
1942 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1943 zero, this machine will be able to place unnamed args that were passed in
1944 registers into the stack. So treat all args as named. This allows the
1945 insns emitting for a specific argument list to be independent of the
1946 function declaration.
1948 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1949 way to pass unnamed args in registers, so we must force them into
1950 memory. */
1952 if ((STRICT_ARGUMENT_NAMING
1953 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1954 && TYPE_ARG_TYPES (funtype) != 0)
1955 n_named_args
1956 = (list_length (TYPE_ARG_TYPES (funtype))
1957 /* Don't include the last named arg. */
1958 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1959 /* Count the struct value address, if it is passed as a parm. */
1960 + structure_value_addr_parm);
1961 else
1962 /* If we know nothing, treat all args as named. */
1963 n_named_args = num_actuals;
1965 /* Make a vector to hold all the information about each arg. */
1966 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1967 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
 1969   /* Build up entries in the ARGS array, compute the size of the arguments
1970 into ARGS_SIZE, etc. */
1971 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1972 actparms, fndecl, &args_so_far,
1973 reg_parm_stack_space, &old_stack_level,
1974 &old_pending_adj, &must_preallocate,
1975 &is_const);
1977 #ifdef FINAL_REG_PARM_STACK_SPACE
1978 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1979 args_size.var);
1980 #endif
1982 if (args_size.var)
1984 /* If this function requires a variable-sized argument list, don't try to
1985 make a cse'able block for this call. We may be able to do this
1986 eventually, but it is too complicated to keep track of what insns go
1987 in the cse'able block and which don't. */
1989 is_const = 0;
1990 must_preallocate = 1;
1993 /* Compute the actual size of the argument block required. The variable
1994 and constant sizes must be combined, the size may have to be rounded,
1995 and there may be a minimum required size. */
1996 unadjusted_args_size
1997 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1999 /* Now make final decision about preallocating stack space. */
2000 must_preallocate = finalize_must_preallocate (must_preallocate,
2001 num_actuals, args, &args_size);
2003 /* If the structure value address will reference the stack pointer, we must
2004 stabilize it. We don't need to do this if we know that we are not going
2005 to adjust the stack pointer in processing this call. */
2007 if (structure_value_addr
2008 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2009 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2010 && (args_size.var
2011 #ifndef ACCUMULATE_OUTGOING_ARGS
2012 || args_size.constant
2013 #endif
2015 structure_value_addr = copy_to_reg (structure_value_addr);
2017 /* Precompute any arguments as needed. */
2018 precompute_arguments (is_const, must_preallocate, num_actuals,
2019 args, &args_size);
2021 /* Now we are about to start emitting insns that can be deleted
2022 if a libcall is deleted. */
2023 if (is_const || is_malloc)
2024 start_sequence ();
2026 /* If we have no actual push instructions, or shouldn't use them,
2027 make space for all args right now. */
2029 if (args_size.var != 0)
2031 if (old_stack_level == 0)
2033 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2034 old_pending_adj = pending_stack_adjust;
2035 pending_stack_adjust = 0;
2036 #ifdef ACCUMULATE_OUTGOING_ARGS
2037 /* stack_arg_under_construction says whether a stack arg is
2038 being constructed at the old stack level. Pushing the stack
2039 gets a clean outgoing argument block. */
2040 old_stack_arg_under_construction = stack_arg_under_construction;
2041 stack_arg_under_construction = 0;
2042 #endif
2044 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2046 else
2048 /* Note that we must go through the motions of allocating an argument
2049 block even if the size is zero because we may be storing args
2050 in the area reserved for register arguments, which may be part of
2051 the stack frame. */
2053 int needed = args_size.constant;
2055 /* Store the maximum argument space used. It will be pushed by
2056 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2057 checking). */
2059 if (needed > current_function_outgoing_args_size)
2060 current_function_outgoing_args_size = needed;
2062 if (must_preallocate)
2064 #ifdef ACCUMULATE_OUTGOING_ARGS
2065 /* Since the stack pointer will never be pushed, it is possible for
2066 the evaluation of a parm to clobber something we have already
2067 written to the stack. Since most function calls on RISC machines
2068 do not use the stack, this is uncommon, but must work correctly.
2070 Therefore, we save any area of the stack that was already written
2071 and that we are using. Here we set up to do this by making a new
2072 stack usage map from the old one. The actual save will be done
2073 by store_one_arg.
2075 Another approach might be to try to reorder the argument
2076 evaluations to avoid this conflicting stack usage. */
2078 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2079 /* Since we will be writing into the entire argument area, the
2080 map must be allocated for its entire size, not just the part that
2081 is the responsibility of the caller. */
2082 needed += reg_parm_stack_space;
2083 #endif
2085 #ifdef ARGS_GROW_DOWNWARD
2086 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2087 needed + 1);
2088 #else
2089 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2090 needed);
2091 #endif
2092 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2094 if (initial_highest_arg_in_use)
2095 bcopy (initial_stack_usage_map, stack_usage_map,
2096 initial_highest_arg_in_use);
2098 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2099 bzero (&stack_usage_map[initial_highest_arg_in_use],
2100 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2101 needed = 0;
2103 /* The address of the outgoing argument list must not be copied to a
2104 register here, because argblock would be left pointing to the
2105 wrong place after the call to allocate_dynamic_stack_space below.
2108 argblock = virtual_outgoing_args_rtx;
2110 #else /* not ACCUMULATE_OUTGOING_ARGS */
2111 if (inhibit_defer_pop == 0)
2113 /* Try to reuse some or all of the pending_stack_adjust
2114 to get this space. Maybe we can avoid any pushing. */
2115 if (needed > pending_stack_adjust)
2117 needed -= pending_stack_adjust;
2118 pending_stack_adjust = 0;
2120 else
2122 pending_stack_adjust -= needed;
2123 needed = 0;
2126 /* Special case this because overhead of `push_block' in this
2127 case is non-trivial. */
2128 if (needed == 0)
2129 argblock = virtual_outgoing_args_rtx;
2130 else
2131 argblock = push_block (GEN_INT (needed), 0, 0);
2133 /* We only really need to call `copy_to_reg' in the case where push
2134 insns are going to be used to pass ARGBLOCK to a function
2135 call in ARGS. In that case, the stack pointer changes value
2136 from the allocation point to the call point, and hence
2137 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2138 But might as well always do it. */
2139 argblock = copy_to_reg (argblock);
2140 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2144 #ifdef ACCUMULATE_OUTGOING_ARGS
2145 /* The save/restore code in store_one_arg handles all cases except one:
2146 a constructor call (including a C function returning a BLKmode struct)
2147 to initialize an argument. */
2148 if (stack_arg_under_construction)
2150 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2151 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2152 #else
2153 rtx push_size = GEN_INT (args_size.constant);
2154 #endif
2155 if (old_stack_level == 0)
2157 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2158 old_pending_adj = pending_stack_adjust;
2159 pending_stack_adjust = 0;
2160 /* stack_arg_under_construction says whether a stack arg is
2161 being constructed at the old stack level. Pushing the stack
2162 gets a clean outgoing argument block. */
2163 old_stack_arg_under_construction = stack_arg_under_construction;
2164 stack_arg_under_construction = 0;
2165 /* Make a new map for the new argument list. */
2166 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2167 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2168 highest_outgoing_arg_in_use = 0;
2170 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2172 /* If argument evaluation might modify the stack pointer, copy the
2173 address of the argument list to a register. */
2174 for (i = 0; i < num_actuals; i++)
2175 if (args[i].pass_on_stack)
2177 argblock = copy_addr_to_reg (argblock);
2178 break;
2180 #endif
2182 compute_argument_addresses (args, argblock, num_actuals);
2184 #ifdef PUSH_ARGS_REVERSED
2185 #ifdef PREFERRED_STACK_BOUNDARY
2186 /* If we push args individually in reverse order, perform stack alignment
2187 before the first push (the last arg). */
2188 if (argblock == 0)
2189 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2190 #endif
2191 #endif
2193 /* Don't try to defer pops if preallocating, not even from the first arg,
2194 since ARGBLOCK probably refers to the SP. */
2195 if (argblock)
2196 NO_DEFER_POP;
2198 funexp = rtx_for_function_call (fndecl, exp);
2200 /* Figure out the register where the value, if any, will come back. */
2201 valreg = 0;
2202 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2203 && ! structure_value_addr)
2205 if (pcc_struct_value)
2206 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2207 fndecl);
2208 else
2209 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2212 /* Precompute all register parameters. It isn't safe to compute anything
2213 once we have started filling any specific hard regs. */
2214 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2216 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2218 /* Save the fixed argument area if it's part of the caller's frame and
2219 is clobbered by argument setup for this call. */
2220 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2221 &low_to_save, &high_to_save);
2222 #endif
2225 /* Now store (and compute if necessary) all non-register parms.
2226 These come before register parms, since they can require block-moves,
2227 which could clobber the registers used for register parms.
2228 Parms which have partial registers are not stored here,
2229 but we do preallocate space here if they want that. */
2231 for (i = 0; i < num_actuals; i++)
2232 if (args[i].reg == 0 || args[i].pass_on_stack)
2233 store_one_arg (&args[i], argblock, may_be_alloca,
2234 args_size.var != 0, reg_parm_stack_space);
2236 /* If we have a parm that is passed in registers but not in memory
2237 and whose alignment does not permit a direct copy into registers,
2238 make a group of pseudos that correspond to each register that we
2239 will later fill. */
2240 if (STRICT_ALIGNMENT)
2241 store_unaligned_arguments_into_pseudos (args, num_actuals);
2243 /* Now store any partially-in-registers parm.
2244 This is the last place a block-move can happen. */
2245 if (reg_parm_seen)
2246 for (i = 0; i < num_actuals; i++)
2247 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2248 store_one_arg (&args[i], argblock, may_be_alloca,
2249 args_size.var != 0, reg_parm_stack_space);
2251 #ifndef PUSH_ARGS_REVERSED
2252 #ifdef PREFERRED_STACK_BOUNDARY
2253 /* If we pushed args in forward order, perform stack alignment
2254 after pushing the last arg. */
2255 if (argblock == 0)
2256 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2257 #endif
2258 #endif
2260 /* If register arguments require space on the stack and stack space
2261 was not preallocated, allocate stack space here for arguments
2262 passed in registers. */
2263 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2264 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2265 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2266 #endif
2268 /* Pass the function the address in which to return a structure value. */
2269 if (structure_value_addr && ! structure_value_addr_parm)
2271 emit_move_insn (struct_value_rtx,
2272 force_reg (Pmode,
2273 force_operand (structure_value_addr,
2274 NULL_RTX)));
2276 /* Mark the memory for the aggregate as write-only. */
2277 if (current_function_check_memory_usage)
2278 emit_library_call (chkr_set_right_libfunc, 1,
2279 VOIDmode, 3,
2280 structure_value_addr, Pmode,
2281 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2282 GEN_INT (MEMORY_USE_WO),
2283 TYPE_MODE (integer_type_node));
2285 if (GET_CODE (struct_value_rtx) == REG)
2286 use_reg (&call_fusage, struct_value_rtx);
2289 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2291 load_register_parameters (args, num_actuals, &call_fusage);
2293 /* Perform postincrements before actually calling the function. */
2294 emit_queue ();
2296 /* All arguments and registers used for the call must be set up by now! */
2298 /* Generate the actual call instruction. */
2299 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2300 args_size.constant, struct_value_size,
2301 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2302 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2304 /* If call is cse'able, make appropriate pair of reg-notes around it.
2305 Test valreg so we don't crash; may safely ignore `const'
2306 if return type is void. Disable for PARALLEL return values, because
2307 we have no way to move such values into a pseudo register. */
2308 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2310 rtx note = 0;
2311 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2312 rtx insns;
2314 /* Mark the return value as a pointer if needed. */
2315 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2317 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2318 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2321 /* Construct an "equal form" for the value which mentions all the
2322 arguments in order as well as the function name. */
2323 #ifdef PUSH_ARGS_REVERSED
2324 for (i = 0; i < num_actuals; i++)
2325 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2326 #else
2327 for (i = num_actuals - 1; i >= 0; i--)
2328 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2329 #endif
2330 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2332 insns = get_insns ();
2333 end_sequence ();
2335 emit_libcall_block (insns, temp, valreg, note);
2337 valreg = temp;
2339 else if (is_const)
2341 /* Otherwise, just write out the sequence without a note. */
2342 rtx insns = get_insns ();
2344 end_sequence ();
2345 emit_insns (insns);
2347 else if (is_malloc)
2349 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2350 rtx last, insns;
2352 /* The return value from a malloc-like function is a pointer. */
2353 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2354 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2356 emit_move_insn (temp, valreg);
2358 /* The return value from a malloc-like function can not alias
2359 anything else. */
2360 last = get_last_insn ();
2361 REG_NOTES (last) =
2362 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2364 /* Write out the sequence. */
2365 insns = get_insns ();
2366 end_sequence ();
2367 emit_insns (insns);
2368 valreg = temp;
2371 /* For calls to `setjmp', etc., inform flow.c it should complain
2372 if nonvolatile values are live. */
2374 if (returns_twice)
2376 emit_note (name, NOTE_INSN_SETJMP);
2377 current_function_calls_setjmp = 1;
2380 if (is_longjmp)
2381 current_function_calls_longjmp = 1;
2383 /* Notice functions that cannot return.
2384 If optimizing, insns emitted below will be dead.
2385 If not optimizing, they will exist, which is useful
2386 if the user uses the `return' command in the debugger. */
2388 if (is_volatile || is_longjmp)
2389 emit_barrier ();
2391 /* If value type not void, return an rtx for the value. */
2393 /* If there are cleanups to be called, don't use a hard reg as target.
2394 We need to double check this and see if it matters anymore. */
2395 if (any_pending_cleanups (1)
2396 && target && REG_P (target)
2397 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2398 target = 0;
2400 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2401 || ignore)
2403 target = const0_rtx;
2405 else if (structure_value_addr)
2407 if (target == 0 || GET_CODE (target) != MEM)
2409 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2410 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2411 structure_value_addr));
2412 MEM_SET_IN_STRUCT_P (target,
2413 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2416 else if (pcc_struct_value)
2418 /* This is the special C++ case where we need to
2419 know what the true target was. We take care to
2420 never use this value more than once in one expression. */
2421 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2422 copy_to_reg (valreg));
2423 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2425 /* Handle calls that return values in multiple non-contiguous locations.
2426 The Irix 6 ABI has examples of this. */
2427 else if (GET_CODE (valreg) == PARALLEL)
2429 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2431 if (target == 0)
2433 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2434 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2435 preserve_temp_slots (target);
2438 if (! rtx_equal_p (target, valreg))
2439 emit_group_store (target, valreg, bytes,
2440 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2442 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2443 && GET_MODE (target) == GET_MODE (valreg))
2444 /* TARGET and VALREG cannot be equal at this point because the latter
2445 would not have REG_FUNCTION_VALUE_P true, while the former would if
2446 it were referring to the same register.
2448 If they refer to the same register, this move will be a no-op, except
2449 when function inlining is being done. */
2450 emit_move_insn (target, valreg);
2451 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2452 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2453 else
2454 target = copy_to_reg (valreg);
2456 #ifdef PROMOTE_FUNCTION_RETURN
2457 /* If we promoted this return value, make the proper SUBREG. TARGET
2458 might be const0_rtx here, so be careful. */
2459 if (GET_CODE (target) == REG
2460 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2461 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2463 tree type = TREE_TYPE (exp);
2464 int unsignedp = TREE_UNSIGNED (type);
2466 /* If we don't promote as expected, something is wrong. */
2467 if (GET_MODE (target)
2468 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2469 abort ();
2471 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2472 SUBREG_PROMOTED_VAR_P (target) = 1;
2473 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2475 #endif
2477 /* If size of args is variable or this was a constructor call for a stack
2478 argument, restore saved stack-pointer value. */
2480 if (old_stack_level)
2482 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2483 pending_stack_adjust = old_pending_adj;
2484 #ifdef ACCUMULATE_OUTGOING_ARGS
2485 stack_arg_under_construction = old_stack_arg_under_construction;
2486 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2487 stack_usage_map = initial_stack_usage_map;
2488 #endif
2490 #ifdef ACCUMULATE_OUTGOING_ARGS
2491 else
2493 #ifdef REG_PARM_STACK_SPACE
2494 if (save_area)
2495 restore_fixed_argument_area (save_area, argblock,
2496 high_to_save, low_to_save);
2497 #endif
2499 /* If we saved any argument areas, restore them. */
2500 for (i = 0; i < num_actuals; i++)
2501 if (args[i].save_area)
2503 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2504 rtx stack_area
2505 = gen_rtx_MEM (save_mode,
2506 memory_address (save_mode,
2507 XEXP (args[i].stack_slot, 0)));
2509 if (save_mode != BLKmode)
2510 emit_move_insn (stack_area, args[i].save_area);
2511 else
2512 emit_block_move (stack_area, validize_mem (args[i].save_area),
2513 GEN_INT (args[i].size.constant),
2514 PARM_BOUNDARY / BITS_PER_UNIT);
2517 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2518 stack_usage_map = initial_stack_usage_map;
2520 #endif
2522 /* If this was alloca, record the new stack level for nonlocal gotos.
2523 Check for the handler slots since we might not have a save area
2524 for non-local gotos. */
2526 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2527 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2529 pop_temp_slots ();
2531 /* Free up storage we no longer need. */
2532 for (i = 0; i < num_actuals; ++i)
2533 if (args[i].aligned_regs)
2534 free (args[i].aligned_regs);
2536 return target;
2539 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2540 (emitting the queue unless NO_QUEUE is nonzero),
2541 for a value of mode OUTMODE,
2542 with NARGS different arguments, passed as alternating rtx values
2543 and machine_modes to convert them to.
2544 The rtx values should have been passed through protect_from_queue already.
2546 NO_QUEUE will be true if and only if the library call is a `const' call
2547 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2548 to the variable is_const in expand_call.
2550 NO_QUEUE must be true for const calls, because if it isn't, then
2551 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2552 and will be lost if the libcall sequence is optimized away.
2554 NO_QUEUE must be false for non-const calls, because if it isn't, the
2555 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2556 optimized. For instance, the instruction scheduler may incorrectly
2557 move memory references across the non-const call. */
2559 void
2560 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2561 int nargs, ...))
2563 #ifndef ANSI_PROTOTYPES
2564 rtx orgfun;
2565 int no_queue;
2566 enum machine_mode outmode;
2567 int nargs;
2568 #endif
2569 va_list p;
2570 /* Total size in bytes of all the stack-parms scanned so far. */
2571 struct args_size args_size;
2572 /* Size of arguments before any adjustments (such as rounding). */
2573 struct args_size original_args_size;
2574 register int argnum;
2575 rtx fun;
2576 int inc;
2577 int count;
2578 rtx argblock = 0;
2579 CUMULATIVE_ARGS args_so_far;
2580 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2581 struct args_size offset; struct args_size size; rtx save_area; };
2582 struct arg *argvec;
2583 int old_inhibit_defer_pop = inhibit_defer_pop;
2584 rtx call_fusage = 0;
2585 int reg_parm_stack_space = 0;
2586 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2587 /* Define the boundary of the register parm stack space that needs to be
 2588      saved, if any.  */
2589 int low_to_save = -1, high_to_save = 0;
2590 rtx save_area = 0; /* Place that it is saved */
2591 #endif
2593 #ifdef ACCUMULATE_OUTGOING_ARGS
2594 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2595 char *initial_stack_usage_map = stack_usage_map;
2596 int needed;
2597 #endif
2599 #ifdef REG_PARM_STACK_SPACE
2600 /* Size of the stack reserved for parameter registers. */
2601 #ifdef MAYBE_REG_PARM_STACK_SPACE
2602 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2603 #else
2604 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2605 #endif
2606 #endif
2608 VA_START (p, nargs);
2610 #ifndef ANSI_PROTOTYPES
2611 orgfun = va_arg (p, rtx);
2612 no_queue = va_arg (p, int);
2613 outmode = va_arg (p, enum machine_mode);
2614 nargs = va_arg (p, int);
2615 #endif
2617 fun = orgfun;
2619 /* Copy all the libcall-arguments out of the varargs data
2620 and into a vector ARGVEC.
2622 Compute how to pass each argument. We only support a very small subset
2623 of the full argument passing conventions to limit complexity here since
2624 library functions shouldn't have many args. */
2626 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2627 bzero ((char *) argvec, nargs * sizeof (struct arg));
2630 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2632 args_size.constant = 0;
2633 args_size.var = 0;
2635 push_temp_slots ();
2637 for (count = 0; count < nargs; count++)
2639 rtx val = va_arg (p, rtx);
2640 enum machine_mode mode = va_arg (p, enum machine_mode);
2642 /* We cannot convert the arg value to the mode the library wants here;
2643 must do it earlier where we know the signedness of the arg. */
2644 if (mode == BLKmode
2645 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2646 abort ();
2648 /* On some machines, there's no way to pass a float to a library fcn.
2649 Pass it as a double instead. */
2650 #ifdef LIBGCC_NEEDS_DOUBLE
2651 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2652 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2653 #endif
2655 /* There's no need to call protect_from_queue, because
2656 either emit_move_insn or emit_push_insn will do that. */
2658 /* Make sure it is a reasonable operand for a move or push insn. */
2659 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2660 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2661 val = force_operand (val, NULL_RTX);
2663 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2664 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2666 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2667 be viewed as just an efficiency improvement. */
2668 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2669 emit_move_insn (slot, val);
2670 val = force_operand (XEXP (slot, 0), NULL_RTX);
2671 mode = Pmode;
2673 #endif
2675 argvec[count].value = val;
2676 argvec[count].mode = mode;
2678 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2679 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2680 abort ();
2681 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2682 argvec[count].partial
2683 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2684 #else
2685 argvec[count].partial = 0;
2686 #endif
2688 locate_and_pad_parm (mode, NULL_TREE,
2689 argvec[count].reg && argvec[count].partial == 0,
2690 NULL_TREE, &args_size, &argvec[count].offset,
2691 &argvec[count].size);
2693 if (argvec[count].size.var)
2694 abort ();
2696 if (reg_parm_stack_space == 0 && argvec[count].partial)
2697 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2699 if (argvec[count].reg == 0 || argvec[count].partial != 0
2700 || reg_parm_stack_space > 0)
2701 args_size.constant += argvec[count].size.constant;
2703 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2705 va_end (p);
2707 #ifdef FINAL_REG_PARM_STACK_SPACE
2708 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2709 args_size.var);
2710 #endif
2712 /* If this machine requires an external definition for library
2713 functions, write one out. */
2714 assemble_external_libcall (fun);
2716 original_args_size = args_size;
2717 #ifdef PREFERRED_STACK_BOUNDARY
2718 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2719 / STACK_BYTES) * STACK_BYTES);
2720 #endif
2722 args_size.constant = MAX (args_size.constant,
2723 reg_parm_stack_space);
2725 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2726 args_size.constant -= reg_parm_stack_space;
2727 #endif
2729 if (args_size.constant > current_function_outgoing_args_size)
2730 current_function_outgoing_args_size = args_size.constant;
2732 #ifdef ACCUMULATE_OUTGOING_ARGS
2733 /* Since the stack pointer will never be pushed, it is possible for
2734 the evaluation of a parm to clobber something we have already
2735 written to the stack. Since most function calls on RISC machines
2736 do not use the stack, this is uncommon, but must work correctly.
2738 Therefore, we save any area of the stack that was already written
2739 and that we are using. Here we set up to do this by making a new
2740 stack usage map from the old one.
2742 Another approach might be to try to reorder the argument
2743 evaluations to avoid this conflicting stack usage. */
2745 needed = args_size.constant;
2747 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2748 /* Since we will be writing into the entire argument area, the
2749 map must be allocated for its entire size, not just the part that
2750 is the responsibility of the caller. */
2751 needed += reg_parm_stack_space;
2752 #endif
2754 #ifdef ARGS_GROW_DOWNWARD
2755 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2756 needed + 1);
2757 #else
2758 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2759 needed);
2760 #endif
2761 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2763 if (initial_highest_arg_in_use)
2764 bcopy (initial_stack_usage_map, stack_usage_map,
2765 initial_highest_arg_in_use);
2767 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2768 bzero (&stack_usage_map[initial_highest_arg_in_use],
2769 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2770 needed = 0;
2772 /* The address of the outgoing argument list must not be copied to a
2773 register here, because argblock would be left pointing to the
2774 wrong place after the call to allocate_dynamic_stack_space below.
2777 argblock = virtual_outgoing_args_rtx;
2778 #else /* not ACCUMULATE_OUTGOING_ARGS */
2779 #ifndef PUSH_ROUNDING
2780 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2781 #endif
2782 #endif
2784 #ifdef PUSH_ARGS_REVERSED
2785 #ifdef PREFERRED_STACK_BOUNDARY
2786 /* If we push args individually in reverse order, perform stack alignment
2787 before the first push (the last arg). */
2788 if (argblock == 0)
2789 anti_adjust_stack (GEN_INT (args_size.constant
2790 - original_args_size.constant));
2791 #endif
2792 #endif
2794 #ifdef PUSH_ARGS_REVERSED
2795 inc = -1;
2796 argnum = nargs - 1;
2797 #else
2798 inc = 1;
2799 argnum = 0;
2800 #endif
2802 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2803 /* The argument list is the property of the called routine and it
2804 may clobber it. If the fixed area has been used for previous
2805 parameters, we must save and restore it.
 2807      Here we compute the boundary of the area that needs to be saved, if any.  */
2809 #ifdef ARGS_GROW_DOWNWARD
2810 for (count = 0; count < reg_parm_stack_space + 1; count++)
2811 #else
2812 for (count = 0; count < reg_parm_stack_space; count++)
2813 #endif
2815 if (count >= highest_outgoing_arg_in_use
2816 || stack_usage_map[count] == 0)
2817 continue;
2819 if (low_to_save == -1)
2820 low_to_save = count;
2822 high_to_save = count;
2825 if (low_to_save >= 0)
2827 int num_to_save = high_to_save - low_to_save + 1;
2828 enum machine_mode save_mode
2829 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2830 rtx stack_area;
2832 /* If we don't have the required alignment, must do this in BLKmode. */
2833 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2834 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2835 save_mode = BLKmode;
2837 #ifdef ARGS_GROW_DOWNWARD
2838 stack_area = gen_rtx_MEM (save_mode,
2839 memory_address (save_mode,
2840 plus_constant (argblock,
2841 - high_to_save)));
2842 #else
2843 stack_area = gen_rtx_MEM (save_mode,
2844 memory_address (save_mode,
2845 plus_constant (argblock,
2846 low_to_save)));
2847 #endif
2848 if (save_mode == BLKmode)
2850 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2851 emit_block_move (validize_mem (save_area), stack_area,
2852 GEN_INT (num_to_save),
2853 PARM_BOUNDARY / BITS_PER_UNIT);
2855 else
2857 save_area = gen_reg_rtx (save_mode);
2858 emit_move_insn (save_area, stack_area);
2861 #endif
2863 /* Push the args that need to be pushed. */
2865 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2866 are to be pushed. */
2867 for (count = 0; count < nargs; count++, argnum += inc)
2869 register enum machine_mode mode = argvec[argnum].mode;
2870 register rtx val = argvec[argnum].value;
2871 rtx reg = argvec[argnum].reg;
2872 int partial = argvec[argnum].partial;
2873 #ifdef ACCUMULATE_OUTGOING_ARGS
2874 int lower_bound, upper_bound, i;
2875 #endif
2877 if (! (reg != 0 && partial == 0))
2879 #ifdef ACCUMULATE_OUTGOING_ARGS
2880 /* If this is being stored into a pre-allocated, fixed-size, stack
2881 area, save any previous data at that location. */
2883 #ifdef ARGS_GROW_DOWNWARD
2884 /* stack_slot is negative, but we want to index stack_usage_map
2885 with positive values. */
2886 upper_bound = -argvec[argnum].offset.constant + 1;
2887 lower_bound = upper_bound - argvec[argnum].size.constant;
2888 #else
2889 lower_bound = argvec[argnum].offset.constant;
2890 upper_bound = lower_bound + argvec[argnum].size.constant;
2891 #endif
2893 for (i = lower_bound; i < upper_bound; i++)
2894 if (stack_usage_map[i]
2895 /* Don't store things in the fixed argument area at this point;
2896 it has already been saved. */
2897 && i > reg_parm_stack_space)
2898 break;
2900 if (i != upper_bound)
2902 /* We need to make a save area. See what mode we can make it. */
2903 enum machine_mode save_mode
2904 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2905 MODE_INT, 1);
2906 rtx stack_area
2907 = gen_rtx_MEM
2908 (save_mode,
2909 memory_address
2910 (save_mode,
2911 plus_constant (argblock,
2912 argvec[argnum].offset.constant)));
2914 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2915 emit_move_insn (argvec[argnum].save_area, stack_area);
2917 #endif
2918 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2919 argblock, GEN_INT (argvec[argnum].offset.constant),
2920 reg_parm_stack_space);
2922 #ifdef ACCUMULATE_OUTGOING_ARGS
2923 /* Now mark the segment we just used. */
2924 for (i = lower_bound; i < upper_bound; i++)
2925 stack_usage_map[i] = 1;
2926 #endif
2928 NO_DEFER_POP;
2932 #ifndef PUSH_ARGS_REVERSED
2933 #ifdef PREFERRED_STACK_BOUNDARY
2934 /* If we pushed args in forward order, perform stack alignment
2935 after pushing the last arg. */
2936 if (argblock == 0)
2937 anti_adjust_stack (GEN_INT (args_size.constant
2938 - original_args_size.constant));
2939 #endif
2940 #endif
2942 #ifdef PUSH_ARGS_REVERSED
2943 argnum = nargs - 1;
2944 #else
2945 argnum = 0;
2946 #endif
2948 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2950 /* Now load any reg parms into their regs. */
2952 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2953 are to be pushed. */
2954 for (count = 0; count < nargs; count++, argnum += inc)
2956 register rtx val = argvec[argnum].value;
2957 rtx reg = argvec[argnum].reg;
2958 int partial = argvec[argnum].partial;
2960 if (reg != 0 && partial == 0)
2961 emit_move_insn (reg, val);
2962 NO_DEFER_POP;
2965 /* For version 1.37, try deleting this entirely. */
2966 if (! no_queue)
2967 emit_queue ();
2969 /* Any regs containing parms remain in use through the call. */
2970 for (count = 0; count < nargs; count++)
2971 if (argvec[count].reg != 0)
2972 use_reg (&call_fusage, argvec[count].reg);
2974 /* Don't allow popping to be deferred, since then
2975 cse'ing of library calls could delete a call and leave the pop. */
2976 NO_DEFER_POP;
2978 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2979 will set inhibit_defer_pop to that value. */
2981 /* The return type is needed to decide how many bytes the function pops.
2982 Signedness plays no role in that, so for simplicity, we pretend it's
2983 always signed. We also assume that the list of arguments passed has
2984 no impact, so we pretend it is unknown. */
2986 emit_call_1 (fun,
2987 get_identifier (XSTR (orgfun, 0)),
2988 build_function_type (outmode == VOIDmode ? void_type_node
2989 : type_for_mode (outmode, 0), NULL_TREE),
2990 original_args_size.constant, args_size.constant, 0,
2991 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2992 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2993 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2995 pop_temp_slots ();
2997 /* Now restore inhibit_defer_pop to its actual original value. */
2998 OK_DEFER_POP;
3000 #ifdef ACCUMULATE_OUTGOING_ARGS
3001 #ifdef REG_PARM_STACK_SPACE
3002 if (save_area)
3004 enum machine_mode save_mode = GET_MODE (save_area);
3005 #ifdef ARGS_GROW_DOWNWARD
3006 rtx stack_area
3007 = gen_rtx_MEM (save_mode,
3008 memory_address (save_mode,
3009 plus_constant (argblock,
3010 - high_to_save)));
3011 #else
3012 rtx stack_area
3013 = gen_rtx_MEM (save_mode,
3014 memory_address (save_mode,
3015 plus_constant (argblock, low_to_save)));
3016 #endif
3018 if (save_mode != BLKmode)
3019 emit_move_insn (stack_area, save_area);
3020 else
3021 emit_block_move (stack_area, validize_mem (save_area),
3022 GEN_INT (high_to_save - low_to_save + 1),
3023 PARM_BOUNDARY / BITS_PER_UNIT);
3025 #endif
3027 /* If we saved any argument areas, restore them. */
3028 for (count = 0; count < nargs; count++)
3029 if (argvec[count].save_area)
3031 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3032 rtx stack_area
3033 = gen_rtx_MEM (save_mode,
3034 memory_address
3035 (save_mode,
3036 plus_constant (argblock,
3037 argvec[count].offset.constant)));
3039 emit_move_insn (stack_area, argvec[count].save_area);
3042 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3043 stack_usage_map = initial_stack_usage_map;
3044 #endif
3047 /* Like emit_library_call except that an extra argument, VALUE,
3048 comes second and says where to store the result.
3049 (If VALUE is zero, this function chooses a convenient way
3050 to return the value.)
3052 This function returns an rtx for where the value is to be found.
3053 If VALUE is nonzero, VALUE is returned. */
/* Emit a library call to function ORGFUN (a SYMBOL_REF rtx) and return
   an rtx holding the call's result.  VALUE, if nonzero, says where the
   result should be stored (and is then the value returned); OUTMODE is
   the machine mode of the result; NARGS (rtx value, machine mode) pairs
   follow as variable arguments.  NO_QUEUE initializes IS_CONST (cleared
   again if the value comes back in memory) and is forwarded to
   emit_call_1.  */
3056 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3057 enum machine_mode outmode, int nargs, ...))
3059 #ifndef ANSI_PROTOTYPES
3060 rtx orgfun;
3061 rtx value;
3062 int no_queue;
3063 enum machine_mode outmode;
3064 int nargs;
3065 #endif
3066 va_list p;
3067 /* Total size in bytes of all the stack-parms scanned so far. */
3068 struct args_size args_size;
3069 /* Size of arguments before any adjustments (such as rounding). */
3070 struct args_size original_args_size;
3071 register int argnum;
3072 rtx fun;
3073 int inc;
3074 int count;
3075 rtx argblock = 0;
3076 CUMULATIVE_ARGS args_so_far;
3077 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3078 struct args_size offset; struct args_size size; rtx save_area; };
3079 struct arg *argvec;
3080 int old_inhibit_defer_pop = inhibit_defer_pop;
3081 rtx call_fusage = 0;
3082 rtx mem_value = 0;
3083 int pcc_struct_value = 0;
3084 int struct_value_size = 0;
3085 int is_const;
3086 int reg_parm_stack_space = 0;
3087 #ifdef ACCUMULATE_OUTGOING_ARGS
3088 int needed;
3089 #endif
3091 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3092 /* Define the boundary of the register parm stack space that needs to be
3093 saved, if any. */
3094 int low_to_save = -1, high_to_save = 0;
3095 rtx save_area = 0; /* Place that it is saved */
3096 #endif
3098 #ifdef ACCUMULATE_OUTGOING_ARGS
3099 /* Size of the stack reserved for parameter registers. */
3100 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3101 char *initial_stack_usage_map = stack_usage_map;
3102 #endif
3104 #ifdef REG_PARM_STACK_SPACE
3105 #ifdef MAYBE_REG_PARM_STACK_SPACE
3106 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3107 #else
3108 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3109 #endif
3110 #endif
3112 VA_START (p, nargs);
3114 #ifndef ANSI_PROTOTYPES
3115 orgfun = va_arg (p, rtx);
3116 value = va_arg (p, rtx);
3117 no_queue = va_arg (p, int);
3118 outmode = va_arg (p, enum machine_mode);
3119 nargs = va_arg (p, int);
3120 #endif
3122 is_const = no_queue;
3123 fun = orgfun;
3125 /* If this kind of value comes back in memory,
3126 decide where in memory it should come back. */
3127 if (aggregate_value_p (type_for_mode (outmode, 0)))
3129 #ifdef PCC_STATIC_STRUCT_RETURN
3130 rtx pointer_reg
3131 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3133 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3134 pcc_struct_value = 1;
3135 if (value == 0)
3136 value = gen_reg_rtx (outmode);
3137 #else /* not PCC_STATIC_STRUCT_RETURN */
3138 struct_value_size = GET_MODE_SIZE (outmode);
3139 if (value != 0 && GET_CODE (value) == MEM)
3140 mem_value = value;
3141 else
3142 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3143 #endif
3145 /* This call returns a big structure. */
3146 is_const = 0;
3149 /* ??? Unfinished: must pass the memory address as an argument. */
3151 /* Copy all the libcall-arguments out of the varargs data
3152 and into a vector ARGVEC.
3154 Compute how to pass each argument. We only support a very small subset
3155 of the full argument passing conventions to limit complexity here since
3156 library functions shouldn't have many args. */
3158 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3159 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3161 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3163 args_size.constant = 0;
3164 args_size.var = 0;
3166 count = 0;
3168 push_temp_slots ();
3170 /* If there's a structure value address to be passed,
3171 either pass it in the special place, or pass it as an extra argument. */
3172 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3174 rtx addr = XEXP (mem_value, 0);
3175 nargs++;
3177 /* Make sure it is a reasonable operand for a move or push insn. */
3178 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3179 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3180 addr = force_operand (addr, NULL_RTX);
3182 argvec[count].value = addr;
3183 argvec[count].mode = Pmode;
3184 argvec[count].partial = 0;
3186 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3187 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3188 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3189 abort ();
3190 #endif
3192 locate_and_pad_parm (Pmode, NULL_TREE,
3193 argvec[count].reg && argvec[count].partial == 0,
3194 NULL_TREE, &args_size, &argvec[count].offset,
3195 &argvec[count].size);
3198 if (argvec[count].reg == 0 || argvec[count].partial != 0
3199 || reg_parm_stack_space > 0)
3200 args_size.constant += argvec[count].size.constant;
3202 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3204 count++;
3207 for (; count < nargs; count++)
3209 rtx val = va_arg (p, rtx);
3210 enum machine_mode mode = va_arg (p, enum machine_mode);
3212 /* We cannot convert the arg value to the mode the library wants here;
3213 must do it earlier where we know the signedness of the arg. */
3214 if (mode == BLKmode
3215 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3216 abort ();
3218 /* On some machines, there's no way to pass a float to a library fcn.
3219 Pass it as a double instead. */
3220 #ifdef LIBGCC_NEEDS_DOUBLE
3221 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3222 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3223 #endif
3225 /* There's no need to call protect_from_queue, because
3226 either emit_move_insn or emit_push_insn will do that. */
3228 /* Make sure it is a reasonable operand for a move or push insn. */
3229 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3230 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3231 val = force_operand (val, NULL_RTX);
3233 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3234 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3236 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3237 be viewed as just an efficiency improvement. */
3238 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3239 emit_move_insn (slot, val);
3240 val = XEXP (slot, 0);
3241 mode = Pmode;
3243 #endif
3245 argvec[count].value = val;
3246 argvec[count].mode = mode;
3248 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3249 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3250 abort ();
3251 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3252 argvec[count].partial
3253 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3254 #else
3255 argvec[count].partial = 0;
3256 #endif
3258 locate_and_pad_parm (mode, NULL_TREE,
3259 argvec[count].reg && argvec[count].partial == 0,
3260 NULL_TREE, &args_size, &argvec[count].offset,
3261 &argvec[count].size);
3263 if (argvec[count].size.var)
3264 abort ();
3266 if (reg_parm_stack_space == 0 && argvec[count].partial)
3267 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3269 if (argvec[count].reg == 0 || argvec[count].partial != 0
3270 || reg_parm_stack_space > 0)
3271 args_size.constant += argvec[count].size.constant;
3273 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3275 va_end (p);
3277 #ifdef FINAL_REG_PARM_STACK_SPACE
3278 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3279 args_size.var);
3280 #endif
3281 /* If this machine requires an external definition for library
3282 functions, write one out. */
3283 assemble_external_libcall (fun);
/* Remember the unrounded argument-block size, then round up to the
   preferred stack boundary (STACK_BYTES is that boundary in bytes).  */
3285 original_args_size = args_size;
3286 #ifdef PREFERRED_STACK_BOUNDARY
3287 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3288 / STACK_BYTES) * STACK_BYTES);
3289 #endif
3291 args_size.constant = MAX (args_size.constant,
3292 reg_parm_stack_space);
3294 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3295 args_size.constant -= reg_parm_stack_space;
3296 #endif
3298 if (args_size.constant > current_function_outgoing_args_size)
3299 current_function_outgoing_args_size = args_size.constant;
3301 #ifdef ACCUMULATE_OUTGOING_ARGS
3302 /* Since the stack pointer will never be pushed, it is possible for
3303 the evaluation of a parm to clobber something we have already
3304 written to the stack. Since most function calls on RISC machines
3305 do not use the stack, this is uncommon, but must work correctly.
3307 Therefore, we save any area of the stack that was already written
3308 and that we are using. Here we set up to do this by making a new
3309 stack usage map from the old one.
3311 Another approach might be to try to reorder the argument
3312 evaluations to avoid this conflicting stack usage. */
3314 needed = args_size.constant;
3316 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3317 /* Since we will be writing into the entire argument area, the
3318 map must be allocated for its entire size, not just the part that
3319 is the responsibility of the caller. */
3320 needed += reg_parm_stack_space;
3321 #endif
3323 #ifdef ARGS_GROW_DOWNWARD
3324 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3325 needed + 1);
3326 #else
3327 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3328 needed);
3329 #endif
3330 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3332 if (initial_highest_arg_in_use)
3333 bcopy (initial_stack_usage_map, stack_usage_map,
3334 initial_highest_arg_in_use);
3336 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3337 bzero (&stack_usage_map[initial_highest_arg_in_use],
3338 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3339 needed = 0;
3341 /* The address of the outgoing argument list must not be copied to a
3342 register here, because argblock would be left pointing to the
3343 wrong place after the call to allocate_dynamic_stack_space below. */
3346 argblock = virtual_outgoing_args_rtx;
3347 #else /* not ACCUMULATE_OUTGOING_ARGS */
3348 #ifndef PUSH_ROUNDING
3349 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3350 #endif
3351 #endif
3353 #ifdef PUSH_ARGS_REVERSED
3354 #ifdef PREFERRED_STACK_BOUNDARY
3355 /* If we push args individually in reverse order, perform stack alignment
3356 before the first push (the last arg). */
3357 if (argblock == 0)
3358 anti_adjust_stack (GEN_INT (args_size.constant
3359 - original_args_size.constant));
3360 #endif
3361 #endif
3363 #ifdef PUSH_ARGS_REVERSED
3364 inc = -1;
3365 argnum = nargs - 1;
3366 #else
3367 inc = 1;
3368 argnum = 0;
3369 #endif
3371 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3372 /* The argument list is the property of the called routine and it
3373 may clobber it. If the fixed area has been used for previous
3374 parameters, we must save and restore it.
3376 Here we compute the boundary of the area that needs to be saved, if any. */
3378 #ifdef ARGS_GROW_DOWNWARD
3379 for (count = 0; count < reg_parm_stack_space + 1; count++)
3380 #else
3381 for (count = 0; count < reg_parm_stack_space; count++)
3382 #endif
3384 if (count >= highest_outgoing_arg_in_use
3385 || stack_usage_map[count] == 0)
3386 continue;
3388 if (low_to_save == -1)
3389 low_to_save = count;
3391 high_to_save = count;
3394 if (low_to_save >= 0)
3396 int num_to_save = high_to_save - low_to_save + 1;
3397 enum machine_mode save_mode
3398 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3399 rtx stack_area;
3401 /* If we don't have the required alignment, must do this in BLKmode. */
3402 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3403 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3404 save_mode = BLKmode;
3406 #ifdef ARGS_GROW_DOWNWARD
3407 stack_area = gen_rtx_MEM (save_mode,
3408 memory_address (save_mode,
3409 plus_constant (argblock,
3410 - high_to_save)));
3411 #else
3412 stack_area = gen_rtx_MEM (save_mode,
3413 memory_address (save_mode,
3414 plus_constant (argblock,
3415 low_to_save)));
3416 #endif
3417 if (save_mode == BLKmode)
3419 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3420 emit_block_move (validize_mem (save_area), stack_area,
3421 GEN_INT (num_to_save),
3422 PARM_BOUNDARY / BITS_PER_UNIT);
3424 else
3426 save_area = gen_reg_rtx (save_mode);
3427 emit_move_insn (save_area, stack_area);
3430 #endif
3432 /* Push the args that need to be pushed. */
3434 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3435 are to be pushed. */
3436 for (count = 0; count < nargs; count++, argnum += inc)
3438 register enum machine_mode mode = argvec[argnum].mode;
3439 register rtx val = argvec[argnum].value;
3440 rtx reg = argvec[argnum].reg;
3441 int partial = argvec[argnum].partial;
3442 #ifdef ACCUMULATE_OUTGOING_ARGS
3443 int lower_bound, upper_bound, i;
3444 #endif
3446 if (! (reg != 0 && partial == 0))
3448 #ifdef ACCUMULATE_OUTGOING_ARGS
3449 /* If this is being stored into a pre-allocated, fixed-size, stack
3450 area, save any previous data at that location. */
3452 #ifdef ARGS_GROW_DOWNWARD
3453 /* stack_slot is negative, but we want to index stack_usage_map
3454 with positive values. */
3455 upper_bound = -argvec[argnum].offset.constant + 1;
3456 lower_bound = upper_bound - argvec[argnum].size.constant;
3457 #else
3458 lower_bound = argvec[argnum].offset.constant;
3459 upper_bound = lower_bound + argvec[argnum].size.constant;
3460 #endif
3462 for (i = lower_bound; i < upper_bound; i++)
3463 if (stack_usage_map[i]
3464 /* Don't store things in the fixed argument area at this point;
3465 it has already been saved. */
3466 && i > reg_parm_stack_space)
3467 break;
3469 if (i != upper_bound)
3471 /* We need to make a save area. See what mode we can make it. */
3472 enum machine_mode save_mode
3473 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3474 MODE_INT, 1);
3475 rtx stack_area
3476 = gen_rtx_MEM
3477 (save_mode,
3478 memory_address
3479 (save_mode,
3480 plus_constant (argblock,
3481 argvec[argnum].offset.constant)));
3482 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3484 emit_move_insn (argvec[argnum].save_area, stack_area);
3486 #endif
3487 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3488 argblock, GEN_INT (argvec[argnum].offset.constant),
3489 reg_parm_stack_space);
3491 #ifdef ACCUMULATE_OUTGOING_ARGS
3492 /* Now mark the segment we just used. */
3493 for (i = lower_bound; i < upper_bound; i++)
3494 stack_usage_map[i] = 1;
3495 #endif
3497 NO_DEFER_POP;
3501 #ifndef PUSH_ARGS_REVERSED
3502 #ifdef PREFERRED_STACK_BOUNDARY
3503 /* If we pushed args in forward order, perform stack alignment
3504 after pushing the last arg. */
3505 if (argblock == 0)
3506 anti_adjust_stack (GEN_INT (args_size.constant
3507 - original_args_size.constant));
3508 #endif
3509 #endif
3511 #ifdef PUSH_ARGS_REVERSED
3512 argnum = nargs - 1;
3513 #else
3514 argnum = 0;
3515 #endif
3517 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3519 /* Now load any reg parms into their regs. */
3521 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3522 are to be pushed. */
3523 for (count = 0; count < nargs; count++, argnum += inc)
3525 register rtx val = argvec[argnum].value;
3526 rtx reg = argvec[argnum].reg;
3527 int partial = argvec[argnum].partial;
3529 if (reg != 0 && partial == 0)
3530 emit_move_insn (reg, val);
3531 NO_DEFER_POP;
3534 #if 0
3535 /* For version 1.37, try deleting this entirely. */
3536 if (! no_queue)
3537 emit_queue ();
3538 #endif
3540 /* Any regs containing parms remain in use through the call. */
3541 for (count = 0; count < nargs; count++)
3542 if (argvec[count].reg != 0)
3543 use_reg (&call_fusage, argvec[count].reg);
3545 /* Pass the function the address in which to return a structure value. */
3546 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3548 emit_move_insn (struct_value_rtx,
3549 force_reg (Pmode,
3550 force_operand (XEXP (mem_value, 0),
3551 NULL_RTX)));
3552 if (GET_CODE (struct_value_rtx) == REG)
3553 use_reg (&call_fusage, struct_value_rtx);
3556 /* Don't allow popping to be deferred, since then
3557 cse'ing of library calls could delete a call and leave the pop. */
3558 NO_DEFER_POP;
3560 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3561 will set inhibit_defer_pop to that value. */
3562 /* See the comment in emit_library_call about the function type we build
3563 and pass here. */
3565 emit_call_1 (fun,
3566 get_identifier (XSTR (orgfun, 0)),
3567 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3568 original_args_size.constant, args_size.constant,
3569 struct_value_size,
3570 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3571 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3572 old_inhibit_defer_pop + 1, call_fusage, is_const);
3574 /* Now restore inhibit_defer_pop to its actual original value. */
3575 OK_DEFER_POP;
3577 pop_temp_slots ();
3579 /* Copy the value to the right place. */
3580 if (outmode != VOIDmode)
3582 if (mem_value)
3584 if (value == 0)
3585 value = mem_value;
3586 if (value != mem_value)
3587 emit_move_insn (value, mem_value);
3589 else if (value != 0)
3590 emit_move_insn (value, hard_libcall_value (outmode));
3591 else
3592 value = hard_libcall_value (outmode);
/* If the register parameter area was saved above, restore it now,
   then restore any individual argument-slot save areas and the
   original stack usage map (ACCUMULATE_OUTGOING_ARGS only).  */
3595 #ifdef ACCUMULATE_OUTGOING_ARGS
3596 #ifdef REG_PARM_STACK_SPACE
3597 if (save_area)
3599 enum machine_mode save_mode = GET_MODE (save_area);
3600 #ifdef ARGS_GROW_DOWNWARD
3601 rtx stack_area
3602 = gen_rtx_MEM (save_mode,
3603 memory_address (save_mode,
3604 plus_constant (argblock,
3605 - high_to_save)));
3606 #else
3607 rtx stack_area
3608 = gen_rtx_MEM (save_mode,
3609 memory_address (save_mode,
3610 plus_constant (argblock, low_to_save)));
3611 #endif
3612 if (save_mode != BLKmode)
3613 emit_move_insn (stack_area, save_area);
3614 else
3615 emit_block_move (stack_area, validize_mem (save_area),
3616 GEN_INT (high_to_save - low_to_save + 1),
3617 PARM_BOUNDARY / BITS_PER_UNIT);
3619 #endif
3621 /* If we saved any argument areas, restore them. */
3622 for (count = 0; count < nargs; count++)
3623 if (argvec[count].save_area)
3625 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3626 rtx stack_area
3627 = gen_rtx_MEM (save_mode,
3628 memory_address
3629 (save_mode,
3630 plus_constant (argblock,
3631 argvec[count].offset.constant)));
3633 emit_move_insn (stack_area, argvec[count].save_area);
3636 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3637 stack_usage_map = initial_stack_usage_map;
3638 #endif
3640 return value;
3643 #if 0
/* NOTE: this entire function is compiled out by the surrounding
   #if 0 / #endif and is retained for reference only.  */
3644 /* Return an rtx which represents a suitable home on the stack
3645 given TYPE, the type of the argument looking for a home.
3646 This is called only for BLKmode arguments.
3648 SIZE is the size needed for this target.
3649 ARGS_ADDR is the address of the bottom of the argument block for this call.
3650 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3651 if this machine uses push insns. */
3653 static rtx
3654 target_for_arg (type, size, args_addr, offset)
3655 tree type;
3656 rtx size;
3657 rtx args_addr;
3658 struct args_size offset;
3660 rtx target;
3661 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3663 /* We do not call memory_address if possible,
3664 because we want to address as close to the stack
3665 as possible. For non-variable sized arguments,
3666 this will be stack-pointer relative addressing. */
3667 if (GET_CODE (offset_rtx) == CONST_INT)
3668 target = plus_constant (args_addr, INTVAL (offset_rtx));
3669 else
3671 /* I have no idea how to guarantee that this
3672 will work in the presence of register parameters. */
3673 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3674 target = memory_address (QImode, target);
3677 return gen_rtx_MEM (BLKmode, target);
3679 #endif
3681 /* Store a single argument for a function call
3682 into the register or memory area where it must be passed.
3683 *ARG describes the argument value and where to pass it.
3685 ARGBLOCK is the address of the stack-block for all the arguments,
3686 or 0 on a machine where arguments are pushed individually.
3688 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3689 so must be careful about how the stack is used.
3691 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3692 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3693 that we need not worry about saving and restoring the stack.
3695 FNDECL is the declaration of the function we are calling. */
3697 static void
3698 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3699 reg_parm_stack_space)
3700 struct arg_data *arg;
3701 rtx argblock;
3702 int may_be_alloca;
3703 int variable_size ATTRIBUTE_UNUSED;
3704 int reg_parm_stack_space;
3706 register tree pval = arg->tree_value;
3707 rtx reg = 0;
3708 int partial = 0;
3709 int used = 0;
3710 #ifdef ACCUMULATE_OUTGOING_ARGS
3711 int i, lower_bound = 0, upper_bound = 0;
3712 #endif
3714 if (TREE_CODE (pval) == ERROR_MARK)
3715 return;
3717 /* Push a new temporary level for any temporaries we make for
3718 this argument. */
3719 push_temp_slots ();
3721 #ifdef ACCUMULATE_OUTGOING_ARGS
3722 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3723 save any previous data at that location. */
3724 if (argblock && ! variable_size && arg->stack)
3726 #ifdef ARGS_GROW_DOWNWARD
3727 /* stack_slot is negative, but we want to index stack_usage_map
3728 with positive values. */
3729 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3730 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3731 else
3732 upper_bound = 0;
3734 lower_bound = upper_bound - arg->size.constant;
3735 #else
3736 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3737 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3738 else
3739 lower_bound = 0;
3741 upper_bound = lower_bound + arg->size.constant;
3742 #endif
3744 for (i = lower_bound; i < upper_bound; i++)
3745 if (stack_usage_map[i]
3746 /* Don't store things in the fixed argument area at this point;
3747 it has already been saved. */
3748 && i > reg_parm_stack_space)
3749 break;
3751 if (i != upper_bound)
3753 /* We need to make a save area. See what mode we can make it. */
3754 enum machine_mode save_mode
3755 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3756 rtx stack_area
3757 = gen_rtx_MEM (save_mode,
3758 memory_address (save_mode,
3759 XEXP (arg->stack_slot, 0)));
3761 if (save_mode == BLKmode)
3763 arg->save_area = assign_stack_temp (BLKmode,
3764 arg->size.constant, 0);
3765 MEM_SET_IN_STRUCT_P (arg->save_area,
3766 AGGREGATE_TYPE_P (TREE_TYPE
3767 (arg->tree_value)));
3768 preserve_temp_slots (arg->save_area);
3769 emit_block_move (validize_mem (arg->save_area), stack_area,
3770 GEN_INT (arg->size.constant),
3771 PARM_BOUNDARY / BITS_PER_UNIT);
3773 else
3775 arg->save_area = gen_reg_rtx (save_mode);
3776 emit_move_insn (arg->save_area, stack_area);
3781 /* Now that we have saved any slots that will be overwritten by this
3782 store, mark all slots this store will use. We must do this before
3783 we actually expand the argument since the expansion itself may
3784 trigger library calls which might need to use the same stack slot. */
3785 if (argblock && ! variable_size && arg->stack)
3786 for (i = lower_bound; i < upper_bound; i++)
3787 stack_usage_map[i] = 1;
3788 #endif
3790 /* If this isn't going to be placed on both the stack and in registers,
3791 set up the register and number of words. */
3792 if (! arg->pass_on_stack)
3793 reg = arg->reg, partial = arg->partial;
3795 if (reg != 0 && partial == 0)
3796 /* Being passed entirely in a register. We shouldn't be called in
3797 this case. */
3798 abort ();
3800 /* If this arg needs special alignment, don't load the registers
3801 here. */
3802 if (arg->n_aligned_regs != 0)
3803 reg = 0;
3805 /* If this is being passed partially in a register, we can't evaluate
3806 it directly into its stack slot. Otherwise, we can. */
3807 if (arg->value == 0)
3809 #ifdef ACCUMULATE_OUTGOING_ARGS
3810 /* stack_arg_under_construction is nonzero if a function argument is
3811 being evaluated directly into the outgoing argument list and
3812 expand_call must take special action to preserve the argument list
3813 if it is called recursively.
3815 For scalar function arguments stack_usage_map is sufficient to
3816 determine which stack slots must be saved and restored. Scalar
3817 arguments in general have pass_on_stack == 0.
3819 If this argument is initialized by a function which takes the
3820 address of the argument (a C++ constructor or a C function
3821 returning a BLKmode structure), then stack_usage_map is
3822 insufficient and expand_call must push the stack around the
3823 function call. Such arguments have pass_on_stack == 1.
3825 Note that it is always safe to set stack_arg_under_construction,
3826 but this generates suboptimal code if set when not needed. */
3828 if (arg->pass_on_stack)
3829 stack_arg_under_construction++;
3830 #endif
3831 arg->value = expand_expr (pval,
3832 (partial
3833 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3834 ? NULL_RTX : arg->stack,
3835 VOIDmode, 0);
3837 /* If we are promoting object (or for any other reason) the mode
3838 doesn't agree, convert the mode. */
3840 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3841 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3842 arg->value, arg->unsignedp);
3844 #ifdef ACCUMULATE_OUTGOING_ARGS
3845 if (arg->pass_on_stack)
3846 stack_arg_under_construction--;
3847 #endif
3850 /* Don't allow anything left on stack from computation
3851 of argument to alloca. */
3852 if (may_be_alloca)
3853 do_pending_stack_adjust ();
3855 if (arg->value == arg->stack)
3857 /* If the value is already in the stack slot, we are done. */
3858 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3860 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3861 XEXP (arg->stack, 0), Pmode,
3862 ARGS_SIZE_RTX (arg->size),
3863 TYPE_MODE (sizetype),
3864 GEN_INT (MEMORY_USE_RW),
3865 TYPE_MODE (integer_type_node));
3868 else if (arg->mode != BLKmode)
3870 register int size;
3872 /* Argument is a scalar, not entirely passed in registers.
3873 (If part is passed in registers, arg->partial says how much
3874 and emit_push_insn will take care of putting it there.)
3876 Push it, and if its size is less than the
3877 amount of space allocated to it,
3878 also bump stack pointer by the additional space.
3879 Note that in C the default argument promotions
3880 will prevent such mismatches. */
3882 size = GET_MODE_SIZE (arg->mode);
3883 /* Compute how much space the push instruction will push.
3884 On many machines, pushing a byte will advance the stack
3885 pointer by a halfword. */
3886 #ifdef PUSH_ROUNDING
3887 size = PUSH_ROUNDING (size);
3888 #endif
3889 used = size;
3891 /* Compute how much space the argument should get:
3892 round up to a multiple of the alignment for arguments. */
3893 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3894 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3895 / (PARM_BOUNDARY / BITS_PER_UNIT))
3896 * (PARM_BOUNDARY / BITS_PER_UNIT));
3898 /* This isn't already where we want it on the stack, so put it there.
3899 This can either be done with push or copy insns. */
3900 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3901 partial, reg, used - size, argblock,
3902 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3904 else
3906 /* BLKmode, at least partly to be pushed. */
3908 register int excess;
3909 rtx size_rtx;
3911 /* Pushing a nonscalar.
3912 If part is passed in registers, PARTIAL says how much
3913 and emit_push_insn will take care of putting it there. */
3915 /* Round its size up to a multiple
3916 of the allocation unit for arguments. */
3918 if (arg->size.var != 0)
3920 excess = 0;
3921 size_rtx = ARGS_SIZE_RTX (arg->size);
3923 else
3925 /* PUSH_ROUNDING has no effect on us, because
3926 emit_push_insn for BLKmode is careful to avoid it. */
3927 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3928 + partial * UNITS_PER_WORD);
3929 size_rtx = expr_size (pval);
3932 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3933 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3934 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3935 reg_parm_stack_space);
3939 /* Unless this is a partially-in-register argument, the argument is now
3940 in the stack.
3942 ??? Note that this can change arg->value from arg->stack to
3943 arg->stack_slot and it matters when they are not the same.
3944 It isn't totally clear that this is correct in all cases. */
3945 if (partial == 0)
3946 arg->value = arg->stack_slot;
3948 /* Once we have pushed something, pops can't safely
3949 be deferred during the rest of the arguments. */
3950 NO_DEFER_POP;
3952 /* ANSI doesn't require a sequence point here,
3953 but PCC has one, so this will avoid some problems. */
3954 emit_queue ();
3956 /* Free any temporary slots made in processing this argument. Show
3957 that we might have taken the address of something and pushed that
3958 as an operand. */
3959 preserve_temp_slots (NULL_RTX);
3960 free_temp_slots ();
3961 pop_temp_slots ();