1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "expr.h"
27 #include "function.h"
28 #include "regs.h"
29 #include "insn-flags.h"
30 #include "toplev.h"
31 #include "output.h"
33 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
34 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
35 #endif
37 /* Decide whether a function's arguments should be processed
38 from first to last or from last to first.
40 They should if the stack and args grow in opposite directions, but
41 only if we have push insns. */
43 #ifdef PUSH_ROUNDING
45 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
46 #define PUSH_ARGS_REVERSED /* If it's last to first */
47 #endif
49 #endif
51 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
52 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
54 /* Data structure and subroutines used within expand_call. */
56 struct arg_data
58 /* Tree node for this argument. */
59 tree tree_value;
60 /* Mode for value; TYPE_MODE unless promoted. */
61 enum machine_mode mode;
62 /* Current RTL value for argument, or 0 if it isn't precomputed. */
63 rtx value;
64   /* Initially-computed RTL value for argument; only for const functions. */
65 rtx initial_value;
66   /* Register to pass this argument in, 0 if passed on stack, or a
67 PARALLEL if the arg is to be copied into multiple non-contiguous
68 registers. */
69 rtx reg;
70 /* If REG was promoted from the actual mode of the argument expression,
71 indicates whether the promotion is sign- or zero-extended. */
72 int unsignedp;
73 /* Number of registers to use. 0 means put the whole arg in registers.
74 Also 0 if not passed in registers. */
75 int partial;
76 /* Non-zero if argument must be passed on stack.
77 Note that some arguments may be passed on the stack
78 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
79 pass_on_stack identifies arguments that *cannot* go in registers. */
80 int pass_on_stack;
81 /* Offset of this argument from beginning of stack-args. */
82 struct args_size offset;
83 /* Similar, but offset to the start of the stack slot. Different from
84 OFFSET if this arg pads downward. */
85 struct args_size slot_offset;
86   /* Size of this argument on the stack, rounded up for any padding it gets;
87 parts of the argument passed in registers do not count.
88 If REG_PARM_STACK_SPACE is defined, then register parms
89 are counted here as well. */
90 struct args_size size;
91 /* Location on the stack at which parameter should be stored. The store
92 has already been done if STACK == VALUE. */
93 rtx stack;
94 /* Location on the stack of the start of this argument slot. This can
95 differ from STACK if this arg pads downward. This location is known
96 to be aligned to FUNCTION_ARG_BOUNDARY. */
97 rtx stack_slot;
98 #ifdef ACCUMULATE_OUTGOING_ARGS
99 /* Place that this stack area has been saved, if needed. */
100 rtx save_area;
101 #endif
102 /* If an argument's alignment does not permit direct copying into registers,
103 copy in smaller-sized pieces into pseudos. These are stored in a
104 block pointed to by this field. The next field says how many
105 word-sized pseudos we made. */
106 rtx *aligned_regs;
107 int n_aligned_regs;
110 #ifdef ACCUMULATE_OUTGOING_ARGS
111 /* A vector of one char per byte of stack space.  A byte is non-zero if
112 the corresponding stack location has been used.
113 This vector is used to prevent a function call within an argument from
114 clobbering any stack already set up. */
115 static char *stack_usage_map;
117 /* Size of STACK_USAGE_MAP. */
118 static int highest_outgoing_arg_in_use;
120 /* stack_arg_under_construction is nonzero when an argument may be
121 initialized with a constructor call (including a C function that
122 returns a BLKmode struct) and expand_call must take special action
123 to make sure the object being constructed does not overlap the
124 argument list for the constructor call. */
125 int stack_arg_under_construction;
126 #endif
128 static int calls_function PROTO ((tree, int));
129 static int calls_function_1 PROTO ((tree, int));
130 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
131 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
132 rtx, int, rtx, int));
133 static void special_function_p PROTO ((char *, tree, int *, int *,
134 int *, int *));
135 static void precompute_register_parameters PROTO ((int, struct arg_data *,
136 int *));
137 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
138 int));
139 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
140 int));
141 static int finalize_must_preallocate PROTO ((int, int,
142 struct arg_data *,
143 struct args_size *));
144 static void precompute_arguments PROTO ((int, int, int,
145 struct arg_data *,
146 struct args_size *));
147 static int compute_argument_block_size PROTO ((int,
148 struct args_size *));
149 static void initialize_argument_information PROTO ((int,
150 struct arg_data *,
151 struct args_size *,
152 int, tree, tree,
153 CUMULATIVE_ARGS *,
154 int, rtx *, int *,
155 int *, int *));
156 static void compute_argument_addresses PROTO ((struct arg_data *,
157 rtx, int));
158 static rtx rtx_for_function_call PROTO ((tree, tree));
159 static void load_register_parameters PROTO ((struct arg_data *,
160 int, rtx *));
162 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
163 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
164 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
165 #endif
167 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
168 `alloca'.
170 If WHICH is 0, return 1 if EXP contains a call to any function.
171   Actually, we only need to return 1 if evaluating EXP would require pushing
172 arguments on the stack, but that is too difficult to compute, so we just
173 assume any function call might require the stack. */
175 static tree calls_function_save_exprs;
177 static int
178 calls_function (exp, which)
179 tree exp;
180 int which;
182 int val;
183 calls_function_save_exprs = 0;
184 val = calls_function_1 (exp, which);
185 calls_function_save_exprs = 0;
186 return val;
189 static int
190 calls_function_1 (exp, which)
191 tree exp;
192 int which;
194 register int i;
195 enum tree_code code = TREE_CODE (exp);
196 int type = TREE_CODE_CLASS (code);
197 int length = tree_code_length[(int) code];
199 /* If this code is language-specific, we don't know what it will do. */
200 if ((int) code >= NUM_TREE_CODES)
201 return 1;
203 /* Only expressions and references can contain calls. */
204 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
205 && type != 'b')
206 return 0;
208 switch (code)
210 case CALL_EXPR:
211 if (which == 0)
212 return 1;
213 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
214 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
215 == FUNCTION_DECL))
217 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
219 if ((DECL_BUILT_IN (fndecl)
220 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
221 || (DECL_SAVED_INSNS (fndecl)
222 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
223 return 1;
226 /* Third operand is RTL. */
227 length = 2;
228 break;
230 case SAVE_EXPR:
231 if (SAVE_EXPR_RTL (exp) != 0)
232 return 0;
233 if (value_member (exp, calls_function_save_exprs))
234 return 0;
235 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
236 calls_function_save_exprs);
237 return (TREE_OPERAND (exp, 0) != 0
238 && calls_function_1 (TREE_OPERAND (exp, 0), which));
240 case BLOCK:
242 register tree local;
244 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
245 if (DECL_INITIAL (local) != 0
246 && calls_function_1 (DECL_INITIAL (local), which))
247 return 1;
250 register tree subblock;
252 for (subblock = BLOCK_SUBBLOCKS (exp);
253 subblock;
254 subblock = TREE_CHAIN (subblock))
255 if (calls_function_1 (subblock, which))
256 return 1;
258 return 0;
260 case METHOD_CALL_EXPR:
261 length = 3;
262 break;
264 case WITH_CLEANUP_EXPR:
265 length = 1;
266 break;
268 case RTL_EXPR:
269 return 0;
271 default:
272 break;
275 for (i = 0; i < length; i++)
276 if (TREE_OPERAND (exp, i) != 0
277 && calls_function_1 (TREE_OPERAND (exp, i), which))
278 return 1;
280 return 0;
283 /* Force FUNEXP into a form suitable for the address of a CALL,
284 and return that as an rtx. Also load the static chain register
285 if FNDECL is a nested function.
287 CALL_FUSAGE points to a variable holding the prospective
288 CALL_INSN_FUNCTION_USAGE information. */
291 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
292 rtx funexp;
293 tree fndecl;
294 rtx *call_fusage;
295 int reg_parm_seen;
297 rtx static_chain_value = 0;
299 funexp = protect_from_queue (funexp, 0);
301 if (fndecl != 0)
302 /* Get possible static chain value for nested function in C. */
303 static_chain_value = lookup_static_chain (fndecl);
305 /* Make a valid memory address and copy constants thru pseudo-regs,
306 but not for a constant address if -fno-function-cse. */
307 if (GET_CODE (funexp) != SYMBOL_REF)
308 /* If we are using registers for parameters, force the
309 function address into a register now. */
310 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
311 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
312 : memory_address (FUNCTION_MODE, funexp));
313 else
315 #ifndef NO_FUNCTION_CSE
316 if (optimize && ! flag_no_function_cse)
317 #ifdef NO_RECURSIVE_FUNCTION_CSE
318 if (fndecl != current_function_decl)
319 #endif
320 funexp = force_reg (Pmode, funexp);
321 #endif
324 if (static_chain_value != 0)
326 emit_move_insn (static_chain_rtx, static_chain_value);
328 if (GET_CODE (static_chain_rtx) == REG)
329 use_reg (call_fusage, static_chain_rtx);
332 return funexp;
335 /* Generate instructions to call function FUNEXP,
336 and optionally pop the results.
337 The CALL_INSN is the first insn generated.
339 FNDECL is the declaration node of the function. This is given to the
340 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
342 FUNTYPE is the data type of the function. This is given to the macro
343 RETURN_POPS_ARGS to determine whether this function pops its own args.
344 We used to allow an identifier for library functions, but that doesn't
345 work when the return type is an aggregate type and the calling convention
346 says that the pointer to this aggregate is to be popped by the callee.
348 STACK_SIZE is the number of bytes of arguments on the stack,
349 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
350 This is both to put into the call insn and
351 to generate explicit popping code if necessary.
353 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
354 It is zero if this call doesn't want a structure value.
356 NEXT_ARG_REG is the rtx that results from executing
357 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function.
374 IS_CONST is true if this is a `const' call. */
376 static void
377 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
378 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
379 call_fusage, is_const)
380 rtx funexp;
381 tree fndecl ATTRIBUTE_UNUSED;
382 tree funtype ATTRIBUTE_UNUSED;
383 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
384 HOST_WIDE_INT rounded_stack_size;
385 HOST_WIDE_INT struct_value_size;
386 rtx next_arg_reg;
387 rtx valreg;
388 int old_inhibit_defer_pop;
389 rtx call_fusage;
390 int is_const;
392 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
393 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
394 rtx call_insn;
395 #ifndef ACCUMULATE_OUTGOING_ARGS
396 int already_popped = 0;
397 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
398 #endif
400 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
401 and we don't want to load it into a register as an optimization,
402 because prepare_call_address already did it if it should be done. */
403 if (GET_CODE (funexp) != SYMBOL_REF)
404 funexp = memory_address (FUNCTION_MODE, funexp);
406 #ifndef ACCUMULATE_OUTGOING_ARGS
407 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
408 /* If the target has "call" or "call_value" insns, then prefer them
409 if no arguments are actually popped. If the target does not have
410 "call" or "call_value" insns, then we must use the popping versions
411 even if the call has no arguments to pop. */
412 #if defined (HAVE_call) && defined (HAVE_call_value)
413 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
414 && n_popped > 0)
415 #else
416 if (HAVE_call_pop && HAVE_call_value_pop)
417 #endif
419 rtx n_pop = GEN_INT (n_popped);
420 rtx pat;
422 /* If this subroutine pops its own args, record that in the call insn
423 if possible, for the sake of frame pointer elimination. */
425 if (valreg)
426 pat = gen_call_value_pop (valreg,
427 gen_rtx_MEM (FUNCTION_MODE, funexp),
428 rounded_stack_size_rtx, next_arg_reg, n_pop);
429 else
430 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
431 rounded_stack_size_rtx, next_arg_reg, n_pop);
433 emit_call_insn (pat);
434 already_popped = 1;
436 else
437 #endif
438 #endif
440 #if defined (HAVE_call) && defined (HAVE_call_value)
441 if (HAVE_call && HAVE_call_value)
443 if (valreg)
444 emit_call_insn (gen_call_value (valreg,
445 gen_rtx_MEM (FUNCTION_MODE, funexp),
446 rounded_stack_size_rtx, next_arg_reg,
447 NULL_RTX));
448 else
449 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
450 rounded_stack_size_rtx, next_arg_reg,
451 struct_value_size_rtx));
453 else
454 #endif
455 abort ();
457 /* Find the CALL insn we just emitted. */
458 for (call_insn = get_last_insn ();
459 call_insn && GET_CODE (call_insn) != CALL_INSN;
460 call_insn = PREV_INSN (call_insn))
463 if (! call_insn)
464 abort ();
466 /* Put the register usage information on the CALL. If there is already
467 some usage information, put ours at the end. */
468 if (CALL_INSN_FUNCTION_USAGE (call_insn))
470 rtx link;
472 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
473 link = XEXP (link, 1))
476 XEXP (link, 1) = call_fusage;
478 else
479 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
481 /* If this is a const call, then set the insn's unchanging bit. */
482 if (is_const)
483 CONST_CALL_P (call_insn) = 1;
485 /* Restore this now, so that we do defer pops for this call's args
486 if the context of the call as a whole permits. */
487 inhibit_defer_pop = old_inhibit_defer_pop;
489 #ifndef ACCUMULATE_OUTGOING_ARGS
490 /* If returning from the subroutine does not automatically pop the args,
491 we need an instruction to pop them sooner or later.
492 Perhaps do it now; perhaps just record how much space to pop later.
494 If returning from the subroutine does pop the args, indicate that the
495 stack pointer will be changed. */
497 if (n_popped > 0)
499 if (!already_popped)
500 CALL_INSN_FUNCTION_USAGE (call_insn)
501 = gen_rtx_EXPR_LIST (VOIDmode,
502 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
503 CALL_INSN_FUNCTION_USAGE (call_insn));
504 rounded_stack_size -= n_popped;
505 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
508 if (rounded_stack_size != 0)
510 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
511 pending_stack_adjust += rounded_stack_size;
512 else
513 adjust_stack (rounded_stack_size_rtx);
515 #endif
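/* Illustrative example (editor's sketch, not part of the original source):
   for a callee that pops its own arguments, RETURN_POPS_ARGS might yield
   n_popped == 16 with rounded_stack_size == 16.  When call_pop patterns are
   available the code above emits one, and the remaining adjustment is
     rounded_stack_size - n_popped = 16 - 16 = 0,
   so no adjust_stack or pending_stack_adjust is needed afterwards.  */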
518 /* Determine if the function identified by NAME and FNDECL is one with
519 special properties we wish to know about.
521 For example, if the function might return more than one time (setjmp), then
522 set RETURNS_TWICE to a nonzero value.
524   Similarly, set IS_LONGJMP if the function is in the longjmp family.
526 Set IS_MALLOC for any of the standard memory allocation functions which
527 allocate from the heap.
529 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
530 space from the stack such as alloca. */
532 static void
533 special_function_p (name, fndecl, returns_twice, is_longjmp,
534 is_malloc, may_be_alloca)
535 char *name;
536 tree fndecl;
537 int *returns_twice;
538 int *is_longjmp;
539 int *is_malloc;
540 int *may_be_alloca;
542 *returns_twice = 0;
543 *is_longjmp = 0;
544 *is_malloc = 0;
545 *may_be_alloca = 0;
547 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
548 /* Exclude functions not at the file scope, or not `extern',
549 since they are not the magic functions we would otherwise
550 think they are. */
551 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
553 char *tname = name;
555 /* We assume that alloca will always be called by name. It
556 makes no sense to pass it as a pointer-to-function to
557 anything that does not understand its behavior. */
558 *may_be_alloca
559 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
560 && name[0] == 'a'
561 && ! strcmp (name, "alloca"))
562 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
563 && name[0] == '_'
564 && ! strcmp (name, "__builtin_alloca"))));
566 /* Disregard prefix _, __ or __x. */
567 if (name[0] == '_')
569 if (name[1] == '_' && name[2] == 'x')
570 tname += 3;
571 else if (name[1] == '_')
572 tname += 2;
573 else
574 tname += 1;
577 if (tname[0] == 's')
579 *returns_twice
580 = ((tname[1] == 'e'
581 && (! strcmp (tname, "setjmp")
582 || ! strcmp (tname, "setjmp_syscall")))
583 || (tname[1] == 'i'
584 && ! strcmp (tname, "sigsetjmp"))
585 || (tname[1] == 'a'
586 && ! strcmp (tname, "savectx")));
587 if (tname[1] == 'i'
588 && ! strcmp (tname, "siglongjmp"))
589 *is_longjmp = 1;
591 else if ((tname[0] == 'q' && tname[1] == 's'
592 && ! strcmp (tname, "qsetjmp"))
593 || (tname[0] == 'v' && tname[1] == 'f'
594 && ! strcmp (tname, "vfork")))
595 *returns_twice = 1;
597 else if (tname[0] == 'l' && tname[1] == 'o'
598 && ! strcmp (tname, "longjmp"))
599 *is_longjmp = 1;
600 /* XXX should have "malloc" attribute on functions instead
601 of recognizing them by name. */
602 else if (! strcmp (tname, "malloc")
603 || ! strcmp (tname, "calloc")
604 || ! strcmp (tname, "realloc")
605 /* Note use of NAME rather than TNAME here. These functions
606 are only reserved when preceded with __. */
607 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
608 || ! strcmp (name, "__nw") /* mangled __builtin_new */
609 || ! strcmp (name, "__builtin_new")
610 || ! strcmp (name, "__builtin_vec_new"))
611 *is_malloc = 1;
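/* Illustrative sketch (editor's addition, not part of the original source):
   a standalone rendition of the name test above.  It strips a leading `_',
   `__' or `__x' prefix and compares the rest against the setjmp-family
   names, much as special_function_p does when it sets *returns_twice.  The
   helper name is hypothetical and the block is guarded by `#if 0' so it is
   never compiled; it assumes strcmp from <string.h>.  */
#if 0
static int
looks_like_returns_twice (name)
     char *name;
{
  char *tname = name;

  /* Disregard prefix _, __ or __x.  */
  if (name[0] == '_')
    {
      if (name[1] == '_' && name[2] == 'x')
        tname += 3;
      else if (name[1] == '_')
        tname += 2;
      else
        tname += 1;
    }

  return (! strcmp (tname, "setjmp")
          || ! strcmp (tname, "setjmp_syscall")
          || ! strcmp (tname, "sigsetjmp")
          || ! strcmp (tname, "savectx")
          || ! strcmp (tname, "qsetjmp")
          || ! strcmp (tname, "vfork"));
}
#endif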
615 /* Precompute all register parameters as described by ARGS, storing values
616 into fields within the ARGS array.
618   NUM_ACTUALS indicates the total number of elements in the ARGS array.
620 Set REG_PARM_SEEN if we encounter a register parameter. */
622 static void
623 precompute_register_parameters (num_actuals, args, reg_parm_seen)
624 int num_actuals;
625 struct arg_data *args;
626 int *reg_parm_seen;
628 int i;
630 *reg_parm_seen = 0;
632 for (i = 0; i < num_actuals; i++)
633 if (args[i].reg != 0 && ! args[i].pass_on_stack)
635 *reg_parm_seen = 1;
637 if (args[i].value == 0)
639 push_temp_slots ();
640 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
641 VOIDmode, 0);
642 preserve_temp_slots (args[i].value);
643 pop_temp_slots ();
645 /* ANSI doesn't require a sequence point here,
646 but PCC has one, so this will avoid some problems. */
647 emit_queue ();
650 /* If we are to promote the function arg to a wider mode,
651 do it now. */
653 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
654 args[i].value
655 = convert_modes (args[i].mode,
656 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
657 args[i].value, args[i].unsignedp);
659 /* If the value is expensive, and we are inside an appropriately
660 short loop, put the value into a pseudo and then put the pseudo
661 into the hard reg.
663 For small register classes, also do this if this call uses
664 register parameters. This is to avoid reload conflicts while
665      loading the parameter registers.
667 if ((! (GET_CODE (args[i].value) == REG
668 || (GET_CODE (args[i].value) == SUBREG
669 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
670 && args[i].mode != BLKmode
671 && rtx_cost (args[i].value, SET) > 2
672 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
673 || preserve_subexpressions_p ()))
674 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
678 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
680 /* The argument list is the property of the called routine and it
681 may clobber it. If the fixed area has been used for previous
682 parameters, we must save and restore it. */
683 static rtx
684 save_fixed_argument_area (reg_parm_stack_space, argblock,
685 low_to_save, high_to_save)
686 int reg_parm_stack_space;
687 rtx argblock;
688 int *low_to_save;
689 int *high_to_save;
691 int i;
692 rtx save_area = NULL_RTX;
694   /* Compute the boundary of the area that needs to be saved, if any.  */
695 #ifdef ARGS_GROW_DOWNWARD
696 for (i = 0; i < reg_parm_stack_space + 1; i++)
697 #else
698 for (i = 0; i < reg_parm_stack_space; i++)
699 #endif
701 if (i >= highest_outgoing_arg_in_use
702 || stack_usage_map[i] == 0)
703 continue;
705 if (*low_to_save == -1)
706 *low_to_save = i;
708 *high_to_save = i;
711 if (*low_to_save >= 0)
713 int num_to_save = *high_to_save - *low_to_save + 1;
714 enum machine_mode save_mode
715 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
716 rtx stack_area;
718 /* If we don't have the required alignment, must do this in BLKmode. */
719 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
720 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
721 save_mode = BLKmode;
723 #ifdef ARGS_GROW_DOWNWARD
724 stack_area = gen_rtx_MEM (save_mode,
725 memory_address (save_mode,
726 plus_constant (argblock,
727 - *high_to_save)));
728 #else
729 stack_area = gen_rtx_MEM (save_mode,
730 memory_address (save_mode,
731 plus_constant (argblock,
732 *low_to_save)));
733 #endif
734 if (save_mode == BLKmode)
736 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
737 emit_block_move (validize_mem (save_area), stack_area,
738 GEN_INT (num_to_save),
739 PARM_BOUNDARY / BITS_PER_UNIT);
741 else
743 save_area = gen_reg_rtx (save_mode);
744 emit_move_insn (save_area, stack_area);
747 return save_area;
750 static void
751 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
752 rtx save_area;
753 rtx argblock;
754 int high_to_save;
755 int low_to_save;
757 enum machine_mode save_mode = GET_MODE (save_area);
758 #ifdef ARGS_GROW_DOWNWARD
759 rtx stack_area
760 = gen_rtx_MEM (save_mode,
761 memory_address (save_mode,
762 plus_constant (argblock,
763 - high_to_save)));
764 #else
765 rtx stack_area
766 = gen_rtx_MEM (save_mode,
767 memory_address (save_mode,
768 plus_constant (argblock,
769 low_to_save)));
770 #endif
772 if (save_mode != BLKmode)
773 emit_move_insn (stack_area, save_area);
774 else
775 emit_block_move (stack_area, validize_mem (save_area),
776 GEN_INT (high_to_save - low_to_save + 1),
777 PARM_BOUNDARY / BITS_PER_UNIT);
779 #endif
781 /* If any elements in ARGS refer to parameters that are to be passed in
782 registers, but not in memory, and whose alignment does not permit a
783    direct copy into registers, copy the values into a group of pseudos
784 which we will later copy into the appropriate hard registers.
786 Pseudos for each unaligned argument will be stored into the array
787 args[argnum].aligned_regs. The caller is responsible for deallocating
788 the aligned_regs array if it is nonzero. */
790 static void
791 store_unaligned_arguments_into_pseudos (args, num_actuals)
792 struct arg_data *args;
793 int num_actuals;
795 int i, j;
797 for (i = 0; i < num_actuals; i++)
798 if (args[i].reg != 0 && ! args[i].pass_on_stack
799 && args[i].mode == BLKmode
800 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
801 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
803 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
804 int big_endian_correction = 0;
806 args[i].n_aligned_regs
807 = args[i].partial ? args[i].partial
808 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
810 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
811 * args[i].n_aligned_regs);
813 /* Structures smaller than a word are aligned to the least
814 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
815 this means we must skip the empty high order bytes when
816 calculating the bit offset. */
817 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
818 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
820 for (j = 0; j < args[i].n_aligned_regs; j++)
822 rtx reg = gen_reg_rtx (word_mode);
823 rtx word = operand_subword_force (args[i].value, j, BLKmode);
824 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
825 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
827 args[i].aligned_regs[j] = reg;
829 /* There is no need to restrict this code to loading items
830 in TYPE_ALIGN sized hunks. The bitfield instructions can
831 load up entire word sized registers efficiently.
833 ??? This may not be needed anymore.
834      We used to emit a clobber here but that doesn't let later
835      passes optimize the instructions we emit.  By storing 0 into
836      the register, later passes know that the first AND to zero out the
837 bitfield being set in the register is unnecessary. The store
838 of 0 will be deleted as will at least the first AND. */
840 emit_move_insn (reg, const0_rtx);
842 bytes -= bitsize / BITS_PER_UNIT;
843 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
844 extract_bit_field (word, bitsize, 0, 1,
845 NULL_RTX, word_mode,
846 word_mode,
847 bitalign / BITS_PER_UNIT,
848 BITS_PER_WORD),
849 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
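/* Worked example (editor's addition, not from the original source): on a
   big-endian target with BITS_PER_WORD == 32, a 3-byte structure gives
     big_endian_correction = 32 - 3 * 8 = 8,
   so its 24 significant bits are stored at bit offset 8, skipping the
   unused high-order byte of the word-sized pseudo.  */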
854 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
855 ACTPARMS.
857 NUM_ACTUALS is the total number of parameters.
859 N_NAMED_ARGS is the total number of named arguments.
861   FNDECL is the tree node for the target of this call (if known).
863 ARGS_SO_FAR holds state needed by the target to know where to place
864 the next argument.
866 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
867 for arguments which are passed in registers.
869   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
870 and may be modified by this routine.
872 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
873   flags which may be modified by this routine.  */
875 static void
876 initialize_argument_information (num_actuals, args, args_size, n_named_args,
877 actparms, fndecl, args_so_far,
878 reg_parm_stack_space, old_stack_level,
879 old_pending_adj, must_preallocate, is_const)
880 int num_actuals ATTRIBUTE_UNUSED;
881 struct arg_data *args;
882 struct args_size *args_size;
883 int n_named_args ATTRIBUTE_UNUSED;
884 tree actparms;
885 tree fndecl;
886 CUMULATIVE_ARGS *args_so_far;
887 int reg_parm_stack_space;
888 rtx *old_stack_level;
889 int *old_pending_adj;
890 int *must_preallocate;
891 int *is_const;
893 /* 1 if scanning parms front to back, -1 if scanning back to front. */
894 int inc;
896 /* Count arg position in order args appear. */
897 int argpos;
899 int i;
900 tree p;
902 args_size->constant = 0;
903 args_size->var = 0;
905 /* In this loop, we consider args in the order they are written.
906 We fill up ARGS from the front or from the back if necessary
907 so that in any case the first arg to be pushed ends up at the front. */
909 #ifdef PUSH_ARGS_REVERSED
910 i = num_actuals - 1, inc = -1;
911 /* In this case, must reverse order of args
912 so that we compute and push the last arg first. */
913 #else
914 i = 0, inc = 1;
915 #endif
917 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
918 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
920 tree type = TREE_TYPE (TREE_VALUE (p));
921 int unsignedp;
922 enum machine_mode mode;
924 args[i].tree_value = TREE_VALUE (p);
926 /* Replace erroneous argument with constant zero. */
927 if (type == error_mark_node || TYPE_SIZE (type) == 0)
928 args[i].tree_value = integer_zero_node, type = integer_type_node;
930 /* If TYPE is a transparent union, pass things the way we would
931 pass the first field of the union. We have already verified that
932 the modes are the same. */
933 if (TYPE_TRANSPARENT_UNION (type))
934 type = TREE_TYPE (TYPE_FIELDS (type));
936 /* Decide where to pass this arg.
938 args[i].reg is nonzero if all or part is passed in registers.
940 args[i].partial is nonzero if part but not all is passed in registers,
941 and the exact value says how many words are passed in registers.
943 args[i].pass_on_stack is nonzero if the argument must at least be
944 computed on the stack. It may then be loaded back into registers
945 if args[i].reg is nonzero.
947 These decisions are driven by the FUNCTION_... macros and must agree
948 with those made by function.c. */
950 /* See if this argument should be passed by invisible reference. */
951 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
952 && contains_placeholder_p (TYPE_SIZE (type)))
953 || TREE_ADDRESSABLE (type)
954 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
955 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
956 type, argpos < n_named_args)
957 #endif
960 /* If we're compiling a thunk, pass through invisible
961 references instead of making a copy. */
962 if (current_function_is_thunk
963 #ifdef FUNCTION_ARG_CALLEE_COPIES
964 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
965 type, argpos < n_named_args)
966 /* If it's in a register, we must make a copy of it too. */
967 /* ??? Is this a sufficient test? Is there a better one? */
968 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
969 && REG_P (DECL_RTL (args[i].tree_value)))
970 && ! TREE_ADDRESSABLE (type))
971 #endif
974 /* C++ uses a TARGET_EXPR to indicate that we want to make a
975 new object from the argument. If we are passing by
976 invisible reference, the callee will do that for us, so we
977 can strip off the TARGET_EXPR. This is not always safe,
978 but it is safe in the only case where this is a useful
979 optimization; namely, when the argument is a plain object.
980 In that case, the frontend is just asking the backend to
981 make a bitwise copy of the argument. */
983 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
984 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
985 (args[i].tree_value, 1)))
986 == 'd')
987 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
988 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
990 args[i].tree_value = build1 (ADDR_EXPR,
991 build_pointer_type (type),
992 args[i].tree_value);
993 type = build_pointer_type (type);
995 else
997 /* We make a copy of the object and pass the address to the
998 function being called. */
999 rtx copy;
1001 if (TYPE_SIZE (type) == 0
1002 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1003 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1004 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1005 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1006 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1008 /* This is a variable-sized object. Make space on the stack
1009 for it. */
1010 rtx size_rtx = expr_size (TREE_VALUE (p));
1012 if (*old_stack_level == 0)
1014 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1015 *old_pending_adj = pending_stack_adjust;
1016 pending_stack_adjust = 0;
1019 copy = gen_rtx_MEM (BLKmode,
1020 allocate_dynamic_stack_space (size_rtx,
1021 NULL_RTX,
1022 TYPE_ALIGN (type)));
1024 else
1026 int size = int_size_in_bytes (type);
1027 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1030 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1032 store_expr (args[i].tree_value, copy, 0);
1033 *is_const = 0;
1035 args[i].tree_value = build1 (ADDR_EXPR,
1036 build_pointer_type (type),
1037 make_tree (type, copy));
1038 type = build_pointer_type (type);
1042 mode = TYPE_MODE (type);
1043 unsignedp = TREE_UNSIGNED (type);
1045 #ifdef PROMOTE_FUNCTION_ARGS
1046 mode = promote_mode (type, mode, &unsignedp, 1);
1047 #endif
1049 args[i].unsignedp = unsignedp;
1050 args[i].mode = mode;
1051 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1052 argpos < n_named_args);
1053 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1054 if (args[i].reg)
1055 args[i].partial
1056 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1057 argpos < n_named_args);
1058 #endif
1060 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1062 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1063 it means that we are to pass this arg in the register(s) designated
1064 by the PARALLEL, but also to pass it in the stack. */
1065 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1066 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1067 args[i].pass_on_stack = 1;
1069 /* If this is an addressable type, we must preallocate the stack
1070 since we must evaluate the object into its final location.
1072 If this is to be passed in both registers and the stack, it is simpler
1073 to preallocate. */
1074 if (TREE_ADDRESSABLE (type)
1075 || (args[i].pass_on_stack && args[i].reg != 0))
1076 *must_preallocate = 1;
1078 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1079 we cannot consider this function call constant. */
1080 if (TREE_ADDRESSABLE (type))
1081 *is_const = 0;
1083 /* Compute the stack-size of this argument. */
1084 if (args[i].reg == 0 || args[i].partial != 0
1085 || reg_parm_stack_space > 0
1086 || args[i].pass_on_stack)
1087 locate_and_pad_parm (mode, type,
1088 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1090 #else
1091 args[i].reg != 0,
1092 #endif
1093 fndecl, args_size, &args[i].offset,
1094 &args[i].size);
1096 #ifndef ARGS_GROW_DOWNWARD
1097 args[i].slot_offset = *args_size;
1098 #endif
1100 /* If a part of the arg was put into registers,
1101 don't include that part in the amount pushed. */
1102 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1103 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1104 / (PARM_BOUNDARY / BITS_PER_UNIT)
1105 * (PARM_BOUNDARY / BITS_PER_UNIT));
1107 /* Update ARGS_SIZE, the total stack space for args so far. */
1109 args_size->constant += args[i].size.constant;
1110 if (args[i].size.var)
1112 ADD_PARM_SIZE (*args_size, args[i].size.var);
1115 /* Since the slot offset points to the bottom of the slot,
1116 we must record it after incrementing if the args grow down. */
1117 #ifdef ARGS_GROW_DOWNWARD
1118 args[i].slot_offset = *args_size;
1120 args[i].slot_offset.constant = -args_size->constant;
1121 if (args_size->var)
1123 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1125 #endif
1127 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1128 have been used, etc. */
1130 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1131 argpos < n_named_args);
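/* Worked example (editor's addition, not from the original source): with
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 32 bits (4 bytes), an argument
   with args[i].partial == 2 already has 2 * 4 = 8 bytes in registers, so
     args[i].size.constant -= 8 / 4 * 4 = 8
   and only the remainder of the argument counts as pushed stack space.  */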
1135 /* Update ARGS_SIZE to contain the total size for the argument block.
1136 Return the original constant component of the argument block's size.
1138 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1139 for arguments passed in registers. */
1141 static int
1142 compute_argument_block_size (reg_parm_stack_space, args_size)
1143 int reg_parm_stack_space;
1144 struct args_size *args_size;
1146 int unadjusted_args_size = args_size->constant;
1148 /* Compute the actual size of the argument block required. The variable
1149 and constant sizes must be combined, the size may have to be rounded,
1150 and there may be a minimum required size. */
1152 if (args_size->var)
1154 args_size->var = ARGS_SIZE_TREE (*args_size);
1155 args_size->constant = 0;
1157 #ifdef PREFERRED_STACK_BOUNDARY
1158 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1159 args_size->var = round_up (args_size->var, STACK_BYTES);
1160 #endif
1162 if (reg_parm_stack_space > 0)
1164 args_size->var
1165 = size_binop (MAX_EXPR, args_size->var,
1166 size_int (reg_parm_stack_space));
1168 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1169 /* The area corresponding to register parameters is not to count in
1170 the size of the block we need. So make the adjustment. */
1171 args_size->var
1172 = size_binop (MINUS_EXPR, args_size->var,
1173 size_int (reg_parm_stack_space));
1174 #endif
1177 else
1179 #ifdef PREFERRED_STACK_BOUNDARY
1180 args_size->constant = (((args_size->constant
1181 + pending_stack_adjust
1182 + STACK_BYTES - 1)
1183 / STACK_BYTES * STACK_BYTES)
1184 - pending_stack_adjust);
1185 #endif
1187 args_size->constant = MAX (args_size->constant,
1188 reg_parm_stack_space);
1190 #ifdef MAYBE_REG_PARM_STACK_SPACE
1191 if (reg_parm_stack_space == 0)
1192 args_size->constant = 0;
1193 #endif
1195 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1196 args_size->constant -= reg_parm_stack_space;
1197 #endif
1199 return unadjusted_args_size;
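/* Worked example (editor's addition, not from the original source): with
   STACK_BYTES == 8, args_size->constant == 18 and pending_stack_adjust == 4,
   the rounding above gives
     ((18 + 4 + 7) / 8) * 8 - 4 = 24 - 4 = 20,
   i.e. the constant part is rounded up so that it plus the pending
   adjustment lands on the preferred stack boundary.  */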
1202 /* Precompute parameters as needed for a function call.
1204 IS_CONST indicates the target function is a pure function.
1206 MUST_PREALLOCATE indicates that we must preallocate stack space for
1207 any stack arguments.
1209 NUM_ACTUALS is the number of arguments.
1211 ARGS is an array containing information for each argument; this routine
1212 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1214 ARGS_SIZE contains information about the size of the arg list. */
1216 static void
1217 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1218 int is_const;
1219 int must_preallocate;
1220 int num_actuals;
1221 struct arg_data *args;
1222 struct args_size *args_size;
1224 int i;
1226 /* If this function call is cse'able, precompute all the parameters.
1227 Note that if the parameter is constructed into a temporary, this will
1228 cause an additional copy because the parameter will be constructed
1229 into a temporary location and then copied into the outgoing arguments.
1230 If a parameter contains a call to alloca and this function uses the
1231 stack, precompute the parameter. */
1233 /* If we preallocated the stack space, and some arguments must be passed
1234 on the stack, then we must precompute any parameter which contains a
1235 function call which will store arguments on the stack.
1236 Otherwise, evaluating the parameter may clobber previous parameters
1237 which have already been stored into the stack. */
1239 for (i = 0; i < num_actuals; i++)
1240 if (is_const
1241 || ((args_size->var != 0 || args_size->constant != 0)
1242 && calls_function (args[i].tree_value, 1))
1243 || (must_preallocate
1244 && (args_size->var != 0 || args_size->constant != 0)
1245 && calls_function (args[i].tree_value, 0)))
1247 /* If this is an addressable type, we cannot pre-evaluate it. */
1248 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1249 abort ();
1251 push_temp_slots ();
1253 args[i].initial_value = args[i].value
1254 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1256 preserve_temp_slots (args[i].value);
1257 pop_temp_slots ();
1259 /* ANSI doesn't require a sequence point here,
1260 but PCC has one, so this will avoid some problems. */
1261 emit_queue ();
1263 args[i].initial_value = args[i].value
1264 = protect_from_queue (args[i].initial_value, 0);
1266 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1267 args[i].value
1268 = convert_modes (args[i].mode,
1269 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1270 args[i].value, args[i].unsignedp);
1274 /* Given the current state of MUST_PREALLOCATE and information about
1275 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1276 compute and return the final value for MUST_PREALLOCATE. */
1278 static int
1279 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1280 int must_preallocate;
1281 int num_actuals;
1282 struct arg_data *args;
1283 struct args_size *args_size;
1285 /* See if we have or want to preallocate stack space.
1287 If we would have to push a partially-in-regs parm
1288 before other stack parms, preallocate stack space instead.
1290 If the size of some parm is not a multiple of the required stack
1291 alignment, we must preallocate.
1293 If the total size of arguments that would otherwise create a copy in
1294 a temporary (such as a CALL) is more than half the total argument list
1295 size, preallocation is faster.
1297 Another reason to preallocate is if we have a machine (like the m88k)
1298 where stack alignment is required to be maintained between every
1299 pair of insns, not just when the call is made. However, we assume here
1300 that such machines either do not have push insns (and hence preallocation
1301 would occur anyway) or the problem is taken care of with
1302 PUSH_ROUNDING. */
1304 if (! must_preallocate)
1306 int partial_seen = 0;
1307 int copy_to_evaluate_size = 0;
1308 int i;
1310 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1312 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1313 partial_seen = 1;
1314 else if (partial_seen && args[i].reg == 0)
1315 must_preallocate = 1;
1317 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1318 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1319 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1320 || TREE_CODE (args[i].tree_value) == COND_EXPR
1321 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1322 copy_to_evaluate_size
1323 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1326 if (copy_to_evaluate_size * 2 >= args_size->constant
1327 && args_size->constant > 0)
1328 must_preallocate = 1;
1330 return must_preallocate;
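/* Worked example (editor's addition, not from the original source): if the
   argument block has args_size->constant == 32 bytes and the BLKmode
   arguments that would otherwise be built in temporaries (CALL_EXPRs,
   TARGET_EXPRs, ...) total copy_to_evaluate_size == 20 bytes, then
   20 * 2 >= 32 and preallocation is chosen to avoid the extra copies.  */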
1333 /* If we preallocated stack space, compute the address of each argument
1334 and store it into the ARGS array.
1336 We need not ensure it is a valid memory address here; it will be
1337 validized when it is used.
1339 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1341 static void
1342 compute_argument_addresses (args, argblock, num_actuals)
1343 struct arg_data *args;
1344 rtx argblock;
1345 int num_actuals;
1347 if (argblock)
1349 rtx arg_reg = argblock;
1350 int i, arg_offset = 0;
1352 if (GET_CODE (argblock) == PLUS)
1353 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1355 for (i = 0; i < num_actuals; i++)
1357 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1358 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1359 rtx addr;
1361 /* Skip this parm if it will not be passed on the stack. */
1362 if (! args[i].pass_on_stack && args[i].reg != 0)
1363 continue;
1365 if (GET_CODE (offset) == CONST_INT)
1366 addr = plus_constant (arg_reg, INTVAL (offset));
1367 else
1368 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1370 addr = plus_constant (addr, arg_offset);
1371 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1372 MEM_SET_IN_STRUCT_P
1373 (args[i].stack,
1374 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1376 if (GET_CODE (slot_offset) == CONST_INT)
1377 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1378 else
1379 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1381 addr = plus_constant (addr, arg_offset);
1382 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1387 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1388 in a call instruction.
1390 FNDECL is the tree node for the target function. For an indirect call
1391 FNDECL will be NULL_TREE.
1393 EXP is the CALL_EXPR for this call. */
1395 static rtx
1396 rtx_for_function_call (fndecl, exp)
1397 tree fndecl;
1398 tree exp;
1400 rtx funexp;
1402 /* Get the function to call, in the form of RTL. */
1403 if (fndecl)
1405 /* If this is the first use of the function, see if we need to
1406 make an external definition for it. */
1407 if (! TREE_USED (fndecl))
1409 assemble_external (fndecl);
1410 TREE_USED (fndecl) = 1;
1413 /* Get a SYMBOL_REF rtx for the function address. */
1414 funexp = XEXP (DECL_RTL (fndecl), 0);
1416 else
1417 /* Generate an rtx (probably a pseudo-register) for the address. */
1419 rtx funaddr;
1420 push_temp_slots ();
1421 funaddr = funexp =
1422 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1423 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1425 /* Check the function is executable. */
1426 if (current_function_check_memory_usage)
1428 #ifdef POINTERS_EXTEND_UNSIGNED
1429 /* It might be OK to convert funexp in place, but there's
1430 a lot going on between here and when it happens naturally
1431 that this seems safer. */
1432 funaddr = convert_memory_address (Pmode, funexp);
1433 #endif
1434 emit_library_call (chkr_check_exec_libfunc, 1,
1435 VOIDmode, 1,
1436 funaddr, Pmode);
1438 emit_queue ();
1440 return funexp;
1443 /* Do the register loads required for any wholly-register parms or any
1444 parms which are passed both on the stack and in a register. Their
1445 expressions were already evaluated.
1447 Mark all register-parms as living through the call, putting these USE
1448 insns in the CALL_INSN_FUNCTION_USAGE field. */
1450 static void
1451 load_register_parameters (args, num_actuals, call_fusage)
1452 struct arg_data *args;
1453 int num_actuals;
1454 rtx *call_fusage;
1456 int i, j;
1458 #ifdef LOAD_ARGS_REVERSED
1459 for (i = num_actuals - 1; i >= 0; i--)
1460 #else
1461 for (i = 0; i < num_actuals; i++)
1462 #endif
1464 rtx reg = args[i].reg;
1465 int partial = args[i].partial;
1466 int nregs;
1468 if (reg)
1470       /* Set to non-negative if we must move a word at a time, even if just
1471          one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
1472 we just use a normal move insn. This value can be zero if the
1473 argument is a zero size structure with no fields. */
1474 nregs = (partial ? partial
1475 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1476 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1477 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1478 : -1));
1480 /* Handle calls that pass values in multiple non-contiguous
1481 locations. The Irix 6 ABI has examples of this. */
1483 if (GET_CODE (reg) == PARALLEL)
1485 emit_group_load (reg, args[i].value,
1486 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1487 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1488 / BITS_PER_UNIT));
1491 /* If simple case, just do move. If normal partial, store_one_arg
1492 has already loaded the register for us. In all other cases,
1493 load the register(s) from memory. */
1495 else if (nregs == -1)
1496 emit_move_insn (reg, args[i].value);
1498 /* If we have pre-computed the values to put in the registers in
1499 the case of non-aligned structures, copy them in now. */
1501 else if (args[i].n_aligned_regs != 0)
1502 for (j = 0; j < args[i].n_aligned_regs; j++)
1503 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1504 args[i].aligned_regs[j]);
1506 else if (partial == 0 || args[i].pass_on_stack)
1507 move_block_to_reg (REGNO (reg),
1508 validize_mem (args[i].value), nregs,
1509 args[i].mode);
1511 /* Handle calls that pass values in multiple non-contiguous
1512 locations. The Irix 6 ABI has examples of this. */
1513 if (GET_CODE (reg) == PARALLEL)
1514 use_group_regs (call_fusage, reg);
1515 else if (nregs == -1)
1516 use_reg (call_fusage, reg);
1517 else
1518 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
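/* Worked example (editor's addition, not from the original source): with
   UNITS_PER_WORD == 4, a 10-byte BLKmode argument passed entirely in
   registers (partial == 0) gets
     nregs = (10 + 3) / 4 = 3,
   so move_block_to_reg copies three words; a non-BLKmode argument with
   partial == 0 gets nregs == -1 and is moved by a single move insn.  */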
1523 /* Generate all the code for a function call
1524 and return an rtx for its value.
1525 Store the value in TARGET (specified as an rtx) if convenient.
1526 If the value is stored in TARGET then TARGET is returned.
1527 If IGNORE is nonzero, then we ignore the value of the function call. */
1530 expand_call (exp, target, ignore)
1531 tree exp;
1532 rtx target;
1533 int ignore;
1535 /* List of actual parameters. */
1536 tree actparms = TREE_OPERAND (exp, 1);
1537 /* RTX for the function to be called. */
1538 rtx funexp;
1539 /* Data type of the function. */
1540 tree funtype;
1541 /* Declaration of the function being called,
1542 or 0 if the function is computed (not known by name). */
1543 tree fndecl = 0;
1544 char *name = 0;
1546 /* Register in which non-BLKmode value will be returned,
1547 or 0 if no value or if value is BLKmode. */
1548 rtx valreg;
1549 /* Address where we should return a BLKmode value;
1550 0 if value not BLKmode. */
1551 rtx structure_value_addr = 0;
1552 /* Nonzero if that address is being passed by treating it as
1553 an extra, implicit first parameter. Otherwise,
1554 it is passed by being copied directly into struct_value_rtx. */
1555 int structure_value_addr_parm = 0;
1556 /* Size of aggregate value wanted, or zero if none wanted
1557 or if we are using the non-reentrant PCC calling convention
1558 or expecting the value in registers. */
1559 HOST_WIDE_INT struct_value_size = 0;
1560 /* Nonzero if called function returns an aggregate in memory PCC style,
1561 by returning the address of where to find it. */
1562 int pcc_struct_value = 0;
1564 /* Number of actual parameters in this call, including struct value addr. */
1565 int num_actuals;
1566 /* Number of named args. Args after this are anonymous ones
1567 and they must all go on the stack. */
1568 int n_named_args;
1570 /* Vector of information about each argument.
1571 Arguments are numbered in the order they will be pushed,
1572 not the order they are written. */
1573 struct arg_data *args;
1575 /* Total size in bytes of all the stack-parms scanned so far. */
1576 struct args_size args_size;
1577 /* Size of arguments before any adjustments (such as rounding). */
1578 int unadjusted_args_size;
1579 /* Data on reg parms scanned so far. */
1580 CUMULATIVE_ARGS args_so_far;
1581 /* Nonzero if a reg parm has been scanned. */
1582 int reg_parm_seen;
1583 /* Nonzero if this is an indirect function call. */
1585 /* Nonzero if we must avoid push-insns in the args for this call.
1586 If stack space is allocated for register parameters, but not by the
1587 caller, then it is preallocated in the fixed part of the stack frame.
1588 So the entire argument block must then be preallocated (i.e., we
1589 ignore PUSH_ROUNDING in that case). */
1591 #ifdef PUSH_ROUNDING
1592 int must_preallocate = 0;
1593 #else
1594 int must_preallocate = 1;
1595 #endif
1597 /* Size of the stack reserved for parameter registers. */
1598 int reg_parm_stack_space = 0;
1600 /* Address of space preallocated for stack parms
1601 (on machines that lack push insns), or 0 if space not preallocated. */
1602 rtx argblock = 0;
1604 /* Nonzero if it is plausible that this is a call to alloca. */
1605 int may_be_alloca;
1606 /* Nonzero if this is a call to malloc or a related function. */
1607 int is_malloc;
1608 /* Nonzero if this is a call to setjmp or a related function. */
1609 int returns_twice;
1610 /* Nonzero if this is a call to `longjmp'. */
1611 int is_longjmp;
1612 /* Nonzero if this is a call to an inline function. */
1613 int is_integrable = 0;
1614 /* Nonzero if this is a call to a `const' function.
1615 Note that only explicitly named functions are handled as `const' here. */
1616 int is_const = 0;
1617 /* Nonzero if this is a call to a `volatile' function. */
1618 int is_volatile = 0;
1619 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1620 /* Define the boundary of the register parm stack space that needs to be
1621      saved, if any.  */
1622 int low_to_save = -1, high_to_save;
1623 rtx save_area = 0; /* Place that it is saved */
1624 #endif
1626 #ifdef ACCUMULATE_OUTGOING_ARGS
1627 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1628 char *initial_stack_usage_map = stack_usage_map;
1629 int old_stack_arg_under_construction;
1630 #endif
1632 rtx old_stack_level = 0;
1633 int old_pending_adj = 0;
1634 int old_inhibit_defer_pop = inhibit_defer_pop;
1635 rtx call_fusage = 0;
1636 register tree p;
1637 register int i;
1639 /* The value of the function call can be put in a hard register. But
1640 if -fcheck-memory-usage, code which invokes functions (and thus
1641 damages some hard registers) can be inserted before using the value.
1642 So, target is always a pseudo-register in that case. */
1643 if (current_function_check_memory_usage)
1644 target = 0;
1646 /* See if we can find a DECL-node for the actual function.
1647 As a result, decide whether this is a call to an integrable function. */
1649 p = TREE_OPERAND (exp, 0);
1650 if (TREE_CODE (p) == ADDR_EXPR)
1652 fndecl = TREE_OPERAND (p, 0);
1653 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1654 fndecl = 0;
1655 else
1657 if (!flag_no_inline
1658 && fndecl != current_function_decl
1659 && DECL_INLINE (fndecl)
1660 && DECL_SAVED_INSNS (fndecl)
1661 && DECL_SAVED_INSNS (fndecl)->inlinable)
1662 is_integrable = 1;
1663 else if (! TREE_ADDRESSABLE (fndecl))
1665 /* In case this function later becomes inlinable,
1666 record that there was already a non-inline call to it.
1668 Use abstraction instead of setting TREE_ADDRESSABLE
1669 directly. */
1670 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1671 && optimize > 0)
1673 warning_with_decl (fndecl, "can't inline call to `%s'");
1674 warning ("called from here");
1676 mark_addressable (fndecl);
1679 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1680 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1681 is_const = 1;
1683 if (TREE_THIS_VOLATILE (fndecl))
1684 is_volatile = 1;
1688   /* If we don't have a specific function to call, see if we have a
1689 constant or `noreturn' function from the type. */
1690 if (fndecl == 0)
1692 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1693 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1696 #ifdef REG_PARM_STACK_SPACE
1697 #ifdef MAYBE_REG_PARM_STACK_SPACE
1698 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1699 #else
1700 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1701 #endif
1702 #endif
1704 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1705 if (reg_parm_stack_space > 0)
1706 must_preallocate = 1;
1707 #endif
1709 /* Warn if this value is an aggregate type,
1710 regardless of which calling convention we are using for it. */
1711 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1712 warning ("function call has aggregate value");
1714 /* Set up a place to return a structure. */
1716 /* Cater to broken compilers. */
1717 if (aggregate_value_p (exp))
1719 /* This call returns a big structure. */
1720 is_const = 0;
1722 #ifdef PCC_STATIC_STRUCT_RETURN
1724 pcc_struct_value = 1;
1725 /* Easier than making that case work right. */
1726 if (is_integrable)
1728 /* In case this is a static function, note that it has been
1729 used. */
1730 if (! TREE_ADDRESSABLE (fndecl))
1731 mark_addressable (fndecl);
1732 is_integrable = 0;
1735 #else /* not PCC_STATIC_STRUCT_RETURN */
1737 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1739 if (target && GET_CODE (target) == MEM)
1740 structure_value_addr = XEXP (target, 0);
1741 else
1743 /* Assign a temporary to hold the value. */
1744 tree d;
1746 /* For variable-sized objects, we must be called with a target
1747 specified. If we were to allocate space on the stack here,
1748 we would have no way of knowing when to free it. */
1750 if (struct_value_size < 0)
1751 abort ();
1753 /* This DECL is just something to feed to mark_addressable;
1754 it doesn't get pushed. */
1755 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1756 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1757 mark_addressable (d);
1758 structure_value_addr = XEXP (DECL_RTL (d), 0);
1759 TREE_USED (d) = 1;
1760 target = 0;
1763 #endif /* not PCC_STATIC_STRUCT_RETURN */
1766 /* If called function is inline, try to integrate it. */
1768 if (is_integrable)
1770 rtx temp;
1771 #ifdef ACCUMULATE_OUTGOING_ARGS
1772 rtx before_call = get_last_insn ();
1773 #endif
1775 temp = expand_inline_function (fndecl, actparms, target,
1776 ignore, TREE_TYPE (exp),
1777 structure_value_addr);
1779 /* If inlining succeeded, return. */
1780 if (temp != (rtx) (HOST_WIDE_INT) -1)
1782 #ifdef ACCUMULATE_OUTGOING_ARGS
1783 /* If the outgoing argument list must be preserved, push
1784 the stack before executing the inlined function if it
1785 makes any calls. */
1787 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1788 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1789 break;
1791 if (stack_arg_under_construction || i >= 0)
1793 rtx first_insn
1794 = before_call ? NEXT_INSN (before_call) : get_insns ();
1795 rtx insn = NULL_RTX, seq;
1797 /* Look for a call in the inline function code.
1798 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1799 nonzero then there is a call and it is not necessary
1800 to scan the insns. */
1802 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1803 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1804 if (GET_CODE (insn) == CALL_INSN)
1805 break;
1807 if (insn)
1809 /* Reserve enough stack space so that the largest
1810 argument list of any function call in the inline
1811 function does not overlap the argument list being
1812 evaluated. This is usually an overestimate because
1813 allocate_dynamic_stack_space reserves space for an
1814 outgoing argument list in addition to the requested
1815 space, but there is no way to ask for stack space such
1816 that an argument list of a certain length can be
1817 safely constructed.
1819 Add the stack space reserved for register arguments, if
1820 any, in the inline function. What is really needed is the
1821 largest value of reg_parm_stack_space in the inline
1822 function, but that is not available. Using the current
1823 value of reg_parm_stack_space is wrong, but gives
1824 correct results on all supported machines. */
1826 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1827 + reg_parm_stack_space);
1829 start_sequence ();
1830 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1831 allocate_dynamic_stack_space (GEN_INT (adjust),
1832 NULL_RTX, BITS_PER_UNIT);
1833 seq = get_insns ();
1834 end_sequence ();
1835 emit_insns_before (seq, first_insn);
1836 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1839 #endif
1841 /* If the result is equivalent to TARGET, return TARGET to simplify
1842 checks in store_expr. They can be equivalent but not equal in the
1843 case of a function that returns BLKmode. */
1844 if (temp != target && rtx_equal_p (temp, target))
1845 return target;
1846 return temp;
1849 /* If inlining failed, mark FNDECL as needing to be compiled
1850 separately after all. If function was declared inline,
1851 give a warning. */
1852 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1853 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1855 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1856 warning ("called from here");
1858 mark_addressable (fndecl);
1861 function_call_count++;
1863 if (fndecl && DECL_NAME (fndecl))
1864 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1866 /* See if this is a call to a function that can return more than once
1867 or a call to longjmp or malloc. */
1868 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1869 &is_malloc, &may_be_alloca);
1871 if (may_be_alloca)
1872 current_function_calls_alloca = 1;
1874 /* Operand 0 is a pointer-to-function; get the type of the function. */
1875 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1876 if (! POINTER_TYPE_P (funtype))
1877 abort ();
1878 funtype = TREE_TYPE (funtype);
1880 /* When calling a const function, we must pop the stack args right away,
1881 so that the pop is deleted or moved with the call. */
1882 if (is_const)
1883 NO_DEFER_POP;
1885 /* Don't let pending stack adjusts add up to too much.
1886 Also, do all pending adjustments now
1887 if there is any chance this might be a call to alloca. */
1889 if (pending_stack_adjust >= 32
1890 || (pending_stack_adjust > 0 && may_be_alloca))
1891 do_pending_stack_adjust ();
1893 /* Push the temporary stack slot level so that we can free any temporaries
1894 we make. */
1895 push_temp_slots ();
1897 /* Start updating where the next arg would go.
1899 On some machines (such as the PA) indirect calls have a different
1900 calling convention than normal calls. The last argument in
1901 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1902 or not. */
1903 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1905 /* If struct_value_rtx is 0, it means pass the address
1906 as if it were an extra parameter. */
1907 if (structure_value_addr && struct_value_rtx == 0)
1909 /* If structure_value_addr is a REG other than
1910 virtual_outgoing_args_rtx, we can always use it. If it
1911 is not a REG, we must always copy it into a register.
1912 If it is virtual_outgoing_args_rtx, we must copy it to another
1913 register in some cases. */
1914 rtx temp = (GET_CODE (structure_value_addr) != REG
1915 #ifdef ACCUMULATE_OUTGOING_ARGS
1916 || (stack_arg_under_construction
1917 && structure_value_addr == virtual_outgoing_args_rtx)
1918 #endif
1919 ? copy_addr_to_reg (structure_value_addr)
1920 : structure_value_addr);
1922 actparms
1923 = tree_cons (error_mark_node,
1924 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1925 temp),
1926 actparms);
1927 structure_value_addr_parm = 1;
1930 /* Count the arguments and set NUM_ACTUALS. */
1931 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1932 num_actuals = i;
1934 /* Compute number of named args.
1935 Normally, don't include the last named arg if anonymous args follow.
1936 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1937 (If no anonymous args follow, the result of list_length is actually
1938 one too large. This is harmless.)
1940 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1941 zero, this machine will be able to place unnamed args that were passed in
1942 registers into the stack. So treat all args as named. This allows the
1943 insns emitted for a specific argument list to be independent of the
1944 function declaration.
1946 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1947 way to pass unnamed args in registers, so we must force them into
1948 memory. */
1950 if ((STRICT_ARGUMENT_NAMING
1951 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1952 && TYPE_ARG_TYPES (funtype) != 0)
1953 n_named_args
1954 = (list_length (TYPE_ARG_TYPES (funtype))
1955 /* Don't include the last named arg. */
1956 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1957 /* Count the struct value address, if it is passed as a parm. */
1958 + structure_value_addr_parm);
1959 else
1960 /* If we know nothing, treat all args as named. */
1961 n_named_args = num_actuals;
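  /* A worked example of the computation above, with hypothetical
     prototypes (not taken from any caller of this code): for
     `int f (int a, int b, ...)' TYPE_ARG_TYPES roughly lists the two
     named parameter types with no trailing void, so list_length gives 2
     and, without STRICT_ARGUMENT_NAMING, n_named_args is 1 -- only A is
     named, because anonymous args follow B.  For a fixed-arity
     `int g (int a, int b)' the list ends in void_type_node, list_length
     gives 3 (the "one too large" case noted above) and the same
     subtraction yields 2, i.e. both args named.  */
#if 0
  {
    /* The same arithmetic with the list lengths written out by hand
       (illustrative values, not derived from a real FUNTYPE).  */
    int varargs_list_len = 2;	/* f: (int, int), no trailing void      */
    int fixed_list_len = 3;	/* g: (int, int, void)                  */
    int named_for_f = varargs_list_len - 1;	/* 1: only A is named    */
    int named_for_g = fixed_list_len - 1;	/* 2: both args named    */
  }
#endif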
1963 /* Make a vector to hold all the information about each arg. */
1964 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1965 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1967 /* Build up entries in the ARGS array, compute the size of the arguments
1968 into ARGS_SIZE, etc. */
1969 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1970 actparms, fndecl, &args_so_far,
1971 reg_parm_stack_space, &old_stack_level,
1972 &old_pending_adj, &must_preallocate,
1973 &is_const);
1975 #ifdef FINAL_REG_PARM_STACK_SPACE
1976 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1977 args_size.var);
1978 #endif
1980 if (args_size.var)
1982 /* If this function requires a variable-sized argument list, don't try to
1983 make a cse'able block for this call. We may be able to do this
1984 eventually, but it is too complicated to keep track of what insns go
1985 in the cse'able block and which don't. */
1987 is_const = 0;
1988 must_preallocate = 1;
1991 /* Compute the actual size of the argument block required. The variable
1992 and constant sizes must be combined, the size may have to be rounded,
1993 and there may be a minimum required size. */
1994 unadjusted_args_size
1995 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1997 /* Now make final decision about preallocating stack space. */
1998 must_preallocate = finalize_must_preallocate (must_preallocate,
1999 num_actuals, args, &args_size);
2001 /* If the structure value address will reference the stack pointer, we must
2002 stabilize it. We don't need to do this if we know that we are not going
2003 to adjust the stack pointer in processing this call. */
2005 if (structure_value_addr
2006 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2007 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2008 && (args_size.var
2009 #ifndef ACCUMULATE_OUTGOING_ARGS
2010 || args_size.constant
2011 #endif
2013 structure_value_addr = copy_to_reg (structure_value_addr);
2015 /* Precompute any arguments as needed. */
2016 precompute_arguments (is_const, must_preallocate, num_actuals,
2017 args, &args_size);
2019 /* Now we are about to start emitting insns that can be deleted
2020 if a libcall is deleted. */
2021 if (is_const || is_malloc)
2022 start_sequence ();
2024 /* If we have no actual push instructions, or shouldn't use them,
2025 make space for all args right now. */
2027 if (args_size.var != 0)
2029 if (old_stack_level == 0)
2031 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2032 old_pending_adj = pending_stack_adjust;
2033 pending_stack_adjust = 0;
2034 #ifdef ACCUMULATE_OUTGOING_ARGS
2035 /* stack_arg_under_construction says whether a stack arg is
2036 being constructed at the old stack level. Pushing the stack
2037 gets a clean outgoing argument block. */
2038 old_stack_arg_under_construction = stack_arg_under_construction;
2039 stack_arg_under_construction = 0;
2040 #endif
2042 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2044 else
2046 /* Note that we must go through the motions of allocating an argument
2047 block even if the size is zero because we may be storing args
2048 in the area reserved for register arguments, which may be part of
2049 the stack frame. */
2051 int needed = args_size.constant;
2053 /* Store the maximum argument space used. It will be pushed by
2054 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2055 checking). */
2057 if (needed > current_function_outgoing_args_size)
2058 current_function_outgoing_args_size = needed;
2060 if (must_preallocate)
2062 #ifdef ACCUMULATE_OUTGOING_ARGS
2063 /* Since the stack pointer will never be pushed, it is possible for
2064 the evaluation of a parm to clobber something we have already
2065 written to the stack. Since most function calls on RISC machines
2066 do not use the stack, this is uncommon, but must work correctly.
2068 Therefore, we save any area of the stack that was already written
2069 and that we are using. Here we set up to do this by making a new
2070 stack usage map from the old one. The actual save will be done
2071 by store_one_arg.
2073 Another approach might be to try to reorder the argument
2074 evaluations to avoid this conflicting stack usage. */
2076 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2077 /* Since we will be writing into the entire argument area, the
2078 map must be allocated for its entire size, not just the part that
2079 is the responsibility of the caller. */
2080 needed += reg_parm_stack_space;
2081 #endif
2083 #ifdef ARGS_GROW_DOWNWARD
2084 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2085 needed + 1);
2086 #else
2087 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2088 needed);
2089 #endif
2090 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2092 if (initial_highest_arg_in_use)
2093 bcopy (initial_stack_usage_map, stack_usage_map,
2094 initial_highest_arg_in_use);
2096 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2097 bzero (&stack_usage_map[initial_highest_arg_in_use],
2098 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2099 needed = 0;
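  /* An illustrative sketch of the usage-map bookkeeping (the offsets and
     sizes below are made up; the real ones come from each argument's
     OFFSET and SIZE).  A nonzero byte in stack_usage_map means that byte
     of the outgoing argument area already holds a value that a later
     store_one_arg must save before overwriting.  */
#if 0
  {
    char map[64];
    int b, clobbered = 0;

    bzero (map, sizeof map);
    for (b = 8; b < 12; b++)	/* an arg already stored at bytes 8..11  */
      map[b] = 1;
    for (b = 10; b < 14; b++)	/* a new arg wants bytes 10..13          */
      if (map[b])
	clobbered = 1;		/* overlap: save bytes 10..11 first      */
  }
#endif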
2101 /* The address of the outgoing argument list must not be copied to a
2102 register here, because argblock would be left pointing to the
2103 wrong place after the call to allocate_dynamic_stack_space below.  */
2106 argblock = virtual_outgoing_args_rtx;
2108 #else /* not ACCUMULATE_OUTGOING_ARGS */
2109 if (inhibit_defer_pop == 0)
2111 /* Try to reuse some or all of the pending_stack_adjust
2112 to get this space. Maybe we can avoid any pushing. */
2113 if (needed > pending_stack_adjust)
2115 needed -= pending_stack_adjust;
2116 pending_stack_adjust = 0;
2118 else
2120 pending_stack_adjust -= needed;
2121 needed = 0;
2124 /* Special case this because overhead of `push_block' in this
2125 case is non-trivial. */
2126 if (needed == 0)
2127 argblock = virtual_outgoing_args_rtx;
2128 else
2129 argblock = push_block (GEN_INT (needed), 0, 0);
2131 /* We only really need to call `copy_to_reg' in the case where push
2132 insns are going to be used to pass ARGBLOCK to a function
2133 call in ARGS. In that case, the stack pointer changes value
2134 from the allocation point to the call point, and hence
2135 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2136 But might as well always do it. */
2137 argblock = copy_to_reg (argblock);
2138 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2142 #ifdef ACCUMULATE_OUTGOING_ARGS
2143 /* The save/restore code in store_one_arg handles all cases except one:
2144 a constructor call (including a C function returning a BLKmode struct)
2145 to initialize an argument. */
2146 if (stack_arg_under_construction)
2148 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2149 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2150 #else
2151 rtx push_size = GEN_INT (args_size.constant);
2152 #endif
2153 if (old_stack_level == 0)
2155 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2156 old_pending_adj = pending_stack_adjust;
2157 pending_stack_adjust = 0;
2158 /* stack_arg_under_construction says whether a stack arg is
2159 being constructed at the old stack level. Pushing the stack
2160 gets a clean outgoing argument block. */
2161 old_stack_arg_under_construction = stack_arg_under_construction;
2162 stack_arg_under_construction = 0;
2163 /* Make a new map for the new argument list. */
2164 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2165 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2166 highest_outgoing_arg_in_use = 0;
2168 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2170 /* If argument evaluation might modify the stack pointer, copy the
2171 address of the argument list to a register. */
2172 for (i = 0; i < num_actuals; i++)
2173 if (args[i].pass_on_stack)
2175 argblock = copy_addr_to_reg (argblock);
2176 break;
2178 #endif
2180 compute_argument_addresses (args, argblock, num_actuals);
2182 #ifdef PUSH_ARGS_REVERSED
2183 #ifdef PREFERRED_STACK_BOUNDARY
2184 /* If we push args individually in reverse order, perform stack alignment
2185 before the first push (the last arg). */
2186 if (argblock == 0)
2187 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2188 #endif
2189 #endif
2191 /* Don't try to defer pops if preallocating, not even from the first arg,
2192 since ARGBLOCK probably refers to the SP. */
2193 if (argblock)
2194 NO_DEFER_POP;
2196 funexp = rtx_for_function_call (fndecl, exp);
2198 /* Figure out the register where the value, if any, will come back. */
2199 valreg = 0;
2200 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2201 && ! structure_value_addr)
2203 if (pcc_struct_value)
2204 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2205 fndecl);
2206 else
2207 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2210 /* Precompute all register parameters. It isn't safe to compute anything
2211 once we have started filling any specific hard regs. */
2212 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2214 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2216 /* Save the fixed argument area if it's part of the caller's frame and
2217 is clobbered by argument setup for this call. */
2218 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2219 &low_to_save, &high_to_save);
2220 #endif
2223 /* Now store (and compute if necessary) all non-register parms.
2224 These come before register parms, since they can require block-moves,
2225 which could clobber the registers used for register parms.
2226 Parms which have partial registers are not stored here,
2227 but we do preallocate space here if they want that. */
2229 for (i = 0; i < num_actuals; i++)
2230 if (args[i].reg == 0 || args[i].pass_on_stack)
2231 store_one_arg (&args[i], argblock, may_be_alloca,
2232 args_size.var != 0, reg_parm_stack_space);
2234 /* If we have a parm that is passed in registers but not in memory
2235 and whose alignment does not permit a direct copy into registers,
2236 make a group of pseudos that correspond to each register that we
2237 will later fill. */
2238 if (STRICT_ALIGNMENT)
2239 store_unaligned_arguments_into_pseudos (args, num_actuals);
2241 /* Now store any partially-in-registers parm.
2242 This is the last place a block-move can happen. */
2243 if (reg_parm_seen)
2244 for (i = 0; i < num_actuals; i++)
2245 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2246 store_one_arg (&args[i], argblock, may_be_alloca,
2247 args_size.var != 0, reg_parm_stack_space);
2249 #ifndef PUSH_ARGS_REVERSED
2250 #ifdef PREFERRED_STACK_BOUNDARY
2251 /* If we pushed args in forward order, perform stack alignment
2252 after pushing the last arg. */
2253 if (argblock == 0)
2254 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2255 #endif
2256 #endif
2258 /* If register arguments require space on the stack and stack space
2259 was not preallocated, allocate stack space here for arguments
2260 passed in registers. */
2261 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2262 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2263 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2264 #endif
2266 /* Pass the function the address in which to return a structure value. */
2267 if (structure_value_addr && ! structure_value_addr_parm)
2269 emit_move_insn (struct_value_rtx,
2270 force_reg (Pmode,
2271 force_operand (structure_value_addr,
2272 NULL_RTX)));
2274 /* Mark the memory for the aggregate as write-only. */
2275 if (current_function_check_memory_usage)
2276 emit_library_call (chkr_set_right_libfunc, 1,
2277 VOIDmode, 3,
2278 structure_value_addr, Pmode,
2279 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2280 GEN_INT (MEMORY_USE_WO),
2281 TYPE_MODE (integer_type_node));
2283 if (GET_CODE (struct_value_rtx) == REG)
2284 use_reg (&call_fusage, struct_value_rtx);
2287 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2289 load_register_parameters (args, num_actuals, &call_fusage);
2291 /* Perform postincrements before actually calling the function. */
2292 emit_queue ();
2294 /* All arguments and registers used for the call must be set up by now! */
2296 /* Generate the actual call instruction. */
2297 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2298 args_size.constant, struct_value_size,
2299 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2300 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2302 /* If call is cse'able, make appropriate pair of reg-notes around it.
2303 Test valreg so we don't crash; may safely ignore `const'
2304 if return type is void. Disable for PARALLEL return values, because
2305 we have no way to move such values into a pseudo register. */
2306 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2308 rtx note = 0;
2309 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2310 rtx insns;
2312 /* Mark the return value as a pointer if needed. */
2313 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2315 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2316 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2319 /* Construct an "equal form" for the value which mentions all the
2320 arguments in order as well as the function name. */
2321 #ifdef PUSH_ARGS_REVERSED
2322 for (i = 0; i < num_actuals; i++)
2323 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2324 #else
2325 for (i = num_actuals - 1; i >= 0; i--)
2326 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2327 #endif
2328 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
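  /* Prepending is just front-first list building; a generic sketch with
     plain structs (no rtl involved, values made up) shows why the loop
     runs in the order opposite to the one the finished list reads.  */
#if 0
  {
    struct cons { int car; struct cons *cdr; };
    struct cons cells[3], *lst = 0;
    int vals[3], i;

    vals[0] = 10, vals[1] = 20, vals[2] = 30;
    for (i = 2; i >= 0; i--)	/* prepend back to front...              */
      {
	cells[i].car = vals[i];
	cells[i].cdr = lst;
	lst = &cells[i];
      }
    /* ...so LST now reads 10, 20, 30 from its head.                     */
  }
#endif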
2330 insns = get_insns ();
2331 end_sequence ();
2333 emit_libcall_block (insns, temp, valreg, note);
2335 valreg = temp;
2337 else if (is_const)
2339 /* Otherwise, just write out the sequence without a note. */
2340 rtx insns = get_insns ();
2342 end_sequence ();
2343 emit_insns (insns);
2345 else if (is_malloc)
2347 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2348 rtx last, insns;
2350 /* The return value from a malloc-like function is a pointer. */
2351 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2352 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2354 emit_move_insn (temp, valreg);
2356 /* The return value from a malloc-like function cannot alias
2357 anything else. */
2358 last = get_last_insn ();
2359 REG_NOTES (last) =
2360 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2362 /* Write out the sequence. */
2363 insns = get_insns ();
2364 end_sequence ();
2365 emit_insns (insns);
2366 valreg = temp;
2369 /* For calls to `setjmp', etc., inform flow.c it should complain
2370 if nonvolatile values are live. */
2372 if (returns_twice)
2374 emit_note (name, NOTE_INSN_SETJMP);
2375 current_function_calls_setjmp = 1;
2378 if (is_longjmp)
2379 current_function_calls_longjmp = 1;
2381 /* Notice functions that cannot return.
2382 If optimizing, insns emitted below will be dead.
2383 If not optimizing, they will exist, which is useful
2384 if the user uses the `return' command in the debugger. */
2386 if (is_volatile || is_longjmp)
2387 emit_barrier ();
2389 /* If value type not void, return an rtx for the value. */
2391 /* If there are cleanups to be called, don't use a hard reg as target.
2392 We need to double check this and see if it matters anymore. */
2393 if (any_pending_cleanups (1)
2394 && target && REG_P (target)
2395 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2396 target = 0;
2398 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2399 || ignore)
2401 target = const0_rtx;
2403 else if (structure_value_addr)
2405 if (target == 0 || GET_CODE (target) != MEM)
2407 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2408 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2409 structure_value_addr));
2410 MEM_SET_IN_STRUCT_P (target,
2411 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2414 else if (pcc_struct_value)
2416 /* This is the special C++ case where we need to
2417 know what the true target was. We take care to
2418 never use this value more than once in one expression. */
2419 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2420 copy_to_reg (valreg));
2421 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2423 /* Handle calls that return values in multiple non-contiguous locations.
2424 The Irix 6 ABI has examples of this. */
2425 else if (GET_CODE (valreg) == PARALLEL)
2427 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2429 if (target == 0)
2431 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2432 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2433 preserve_temp_slots (target);
2436 if (! rtx_equal_p (target, valreg))
2437 emit_group_store (target, valreg, bytes,
2438 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2440 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2441 && GET_MODE (target) == GET_MODE (valreg))
2442 /* TARGET and VALREG cannot be equal at this point because the latter
2443 would not have REG_FUNCTION_VALUE_P true, while the former would if
2444 it were referring to the same register.
2446 If they refer to the same register, this move will be a no-op, except
2447 when function inlining is being done. */
2448 emit_move_insn (target, valreg);
2449 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2450 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2451 else
2452 target = copy_to_reg (valreg);
2454 #ifdef PROMOTE_FUNCTION_RETURN
2455 /* If we promoted this return value, make the proper SUBREG. TARGET
2456 might be const0_rtx here, so be careful. */
2457 if (GET_CODE (target) == REG
2458 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2459 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2461 tree type = TREE_TYPE (exp);
2462 int unsignedp = TREE_UNSIGNED (type);
2464 /* If we don't promote as expected, something is wrong. */
2465 if (GET_MODE (target)
2466 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2467 abort ();
2469 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2470 SUBREG_PROMOTED_VAR_P (target) = 1;
2471 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2473 #endif
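  /* What the promoted SUBREG records, with hypothetical widths (a 16-bit
     short widened into a 32-bit register): the caller keeps the wide bits
     and only remembers whether the widening was a sign or a zero
     extension, so later re-widening needs no extra insn.  */
#if 0
  {
    int wide = -5;		   /* bits as they sit in the return reg  */
    short narrow = (short) wide;   /* the SUBREG view of the same bits    */
    int widened_again = narrow;	   /* still -5; no extension insn needed  */
  }
#endif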
2475 /* If size of args is variable or this was a constructor call for a stack
2476 argument, restore saved stack-pointer value. */
2478 if (old_stack_level)
2480 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2481 pending_stack_adjust = old_pending_adj;
2482 #ifdef ACCUMULATE_OUTGOING_ARGS
2483 stack_arg_under_construction = old_stack_arg_under_construction;
2484 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2485 stack_usage_map = initial_stack_usage_map;
2486 #endif
2488 #ifdef ACCUMULATE_OUTGOING_ARGS
2489 else
2491 #ifdef REG_PARM_STACK_SPACE
2492 if (save_area)
2493 restore_fixed_argument_area (save_area, argblock,
2494 high_to_save, low_to_save);
2495 #endif
2497 /* If we saved any argument areas, restore them. */
2498 for (i = 0; i < num_actuals; i++)
2499 if (args[i].save_area)
2501 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2502 rtx stack_area
2503 = gen_rtx_MEM (save_mode,
2504 memory_address (save_mode,
2505 XEXP (args[i].stack_slot, 0)));
2507 if (save_mode != BLKmode)
2508 emit_move_insn (stack_area, args[i].save_area);
2509 else
2510 emit_block_move (stack_area, validize_mem (args[i].save_area),
2511 GEN_INT (args[i].size.constant),
2512 PARM_BOUNDARY / BITS_PER_UNIT);
2515 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2516 stack_usage_map = initial_stack_usage_map;
2518 #endif
2520 /* If this was alloca, record the new stack level for nonlocal gotos.
2521 Check for the handler slots since we might not have a save area
2522 for non-local gotos. */
2524 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2525 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2527 pop_temp_slots ();
2529 /* Free up storage we no longer need. */
2530 for (i = 0; i < num_actuals; ++i)
2531 if (args[i].aligned_regs)
2532 free (args[i].aligned_regs);
2534 return target;
2537 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2538 (emitting the queue unless NO_QUEUE is nonzero),
2539 for a value of mode OUTMODE,
2540 with NARGS different arguments, passed as alternating rtx values
2541 and machine_modes to convert them to.
2542 The rtx values should have been passed through protect_from_queue already.
2544 NO_QUEUE will be true if and only if the library call is a `const' call
2545 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2546 to the variable is_const in expand_call.
2548 NO_QUEUE must be true for const calls, because if it isn't, then
2549 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2550 and will be lost if the libcall sequence is optimized away.
2552 NO_QUEUE must be false for non-const calls, because if it isn't, the
2553 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2554 optimized. For instance, the instruction scheduler may incorrectly
2555 move memory references across the non-const call. */
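/* An illustrative use (the SYMBOL_REF and operand registers below are
   hypothetical): a two-operand SImode libcall emitted as a `const'
   (NO_QUEUE) call, with arguments given as alternating value/mode pairs.  */
#if 0
  emit_library_call (my_libfunc, 1 /* no_queue */, SImode, 2 /* nargs */,
		     op0, SImode, op1, SImode);
#endif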
2557 void
2558 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2559 int nargs, ...))
2561 #ifndef ANSI_PROTOTYPES
2562 rtx orgfun;
2563 int no_queue;
2564 enum machine_mode outmode;
2565 int nargs;
2566 #endif
2567 va_list p;
2568 /* Total size in bytes of all the stack-parms scanned so far. */
2569 struct args_size args_size;
2570 /* Size of arguments before any adjustments (such as rounding). */
2571 struct args_size original_args_size;
2572 register int argnum;
2573 rtx fun;
2574 int inc;
2575 int count;
2576 rtx argblock = 0;
2577 CUMULATIVE_ARGS args_so_far;
2578 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2579 struct args_size offset; struct args_size size; rtx save_area; };
2580 struct arg *argvec;
2581 int old_inhibit_defer_pop = inhibit_defer_pop;
2582 rtx call_fusage = 0;
2583 int reg_parm_stack_space = 0;
2584 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2585 /* Define the boundary of the register parm stack space that needs to be
2586 saved, if any. */
2587 int low_to_save = -1, high_to_save = 0;
2588 rtx save_area = 0; /* Place that it is saved */
2589 #endif
2591 #ifdef ACCUMULATE_OUTGOING_ARGS
2592 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2593 char *initial_stack_usage_map = stack_usage_map;
2594 int needed;
2595 #endif
2597 #ifdef REG_PARM_STACK_SPACE
2598 /* Size of the stack reserved for parameter registers. */
2599 #ifdef MAYBE_REG_PARM_STACK_SPACE
2600 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2601 #else
2602 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2603 #endif
2604 #endif
2606 VA_START (p, nargs);
2608 #ifndef ANSI_PROTOTYPES
2609 orgfun = va_arg (p, rtx);
2610 no_queue = va_arg (p, int);
2611 outmode = va_arg (p, enum machine_mode);
2612 nargs = va_arg (p, int);
2613 #endif
2615 fun = orgfun;
2617 /* Copy all the libcall-arguments out of the varargs data
2618 and into a vector ARGVEC.
2620 Compute how to pass each argument. We only support a very small subset
2621 of the full argument passing conventions to limit complexity here since
2622 library functions shouldn't have many args. */
2624 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2625 bzero ((char *) argvec, nargs * sizeof (struct arg));
2628 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2630 args_size.constant = 0;
2631 args_size.var = 0;
2633 push_temp_slots ();
2635 for (count = 0; count < nargs; count++)
2637 rtx val = va_arg (p, rtx);
2638 enum machine_mode mode = va_arg (p, enum machine_mode);
2640 /* We cannot convert the arg value to the mode the library wants here;
2641 must do it earlier where we know the signedness of the arg. */
2642 if (mode == BLKmode
2643 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2644 abort ();
2646 /* On some machines, there's no way to pass a float to a library fcn.
2647 Pass it as a double instead. */
2648 #ifdef LIBGCC_NEEDS_DOUBLE
2649 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2650 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2651 #endif
2653 /* There's no need to call protect_from_queue, because
2654 either emit_move_insn or emit_push_insn will do that. */
2656 /* Make sure it is a reasonable operand for a move or push insn. */
2657 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2658 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2659 val = force_operand (val, NULL_RTX);
2661 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2662 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2664 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2665 be viewed as just an efficiency improvement. */
2666 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2667 emit_move_insn (slot, val);
2668 val = force_operand (XEXP (slot, 0), NULL_RTX);
2669 mode = Pmode;
2671 #endif
2673 argvec[count].value = val;
2674 argvec[count].mode = mode;
2676 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2677 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2678 abort ();
2679 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2680 argvec[count].partial
2681 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2682 #else
2683 argvec[count].partial = 0;
2684 #endif
2686 locate_and_pad_parm (mode, NULL_TREE,
2687 argvec[count].reg && argvec[count].partial == 0,
2688 NULL_TREE, &args_size, &argvec[count].offset,
2689 &argvec[count].size);
2691 if (argvec[count].size.var)
2692 abort ();
2694 if (reg_parm_stack_space == 0 && argvec[count].partial)
2695 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2697 if (argvec[count].reg == 0 || argvec[count].partial != 0
2698 || reg_parm_stack_space > 0)
2699 args_size.constant += argvec[count].size.constant;
2701 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2703 va_end (p);
2705 #ifdef FINAL_REG_PARM_STACK_SPACE
2706 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2707 args_size.var);
2708 #endif
2710 /* If this machine requires an external definition for library
2711 functions, write one out. */
2712 assemble_external_libcall (fun);
2714 original_args_size = args_size;
2715 #ifdef PREFERRED_STACK_BOUNDARY
2716 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2717 / STACK_BYTES) * STACK_BYTES);
2718 #endif
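  /* A concrete instance of the round-up above, with made-up numbers: if
     PREFERRED_STACK_BOUNDARY is 128 bits, STACK_BYTES is 16 and a 20-byte
     argument block is padded to 32 bytes.  */
#if 0
  {
    int stack_bytes = 16, size = 20;
    size = ((size + stack_bytes - 1) / stack_bytes) * stack_bytes;  /* 32 */
  }
#endif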
2720 args_size.constant = MAX (args_size.constant,
2721 reg_parm_stack_space);
2723 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2724 args_size.constant -= reg_parm_stack_space;
2725 #endif
2727 if (args_size.constant > current_function_outgoing_args_size)
2728 current_function_outgoing_args_size = args_size.constant;
2730 #ifdef ACCUMULATE_OUTGOING_ARGS
2731 /* Since the stack pointer will never be pushed, it is possible for
2732 the evaluation of a parm to clobber something we have already
2733 written to the stack. Since most function calls on RISC machines
2734 do not use the stack, this is uncommon, but must work correctly.
2736 Therefore, we save any area of the stack that was already written
2737 and that we are using. Here we set up to do this by making a new
2738 stack usage map from the old one.
2740 Another approach might be to try to reorder the argument
2741 evaluations to avoid this conflicting stack usage. */
2743 needed = args_size.constant;
2745 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2746 /* Since we will be writing into the entire argument area, the
2747 map must be allocated for its entire size, not just the part that
2748 is the responsibility of the caller. */
2749 needed += reg_parm_stack_space;
2750 #endif
2752 #ifdef ARGS_GROW_DOWNWARD
2753 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2754 needed + 1);
2755 #else
2756 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2757 needed);
2758 #endif
2759 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2761 if (initial_highest_arg_in_use)
2762 bcopy (initial_stack_usage_map, stack_usage_map,
2763 initial_highest_arg_in_use);
2765 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2766 bzero (&stack_usage_map[initial_highest_arg_in_use],
2767 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2768 needed = 0;
2770 /* The address of the outgoing argument list must not be copied to a
2771 register here, because argblock would be left pointing to the
2772 wrong place after the call to allocate_dynamic_stack_space below.  */
2775 argblock = virtual_outgoing_args_rtx;
2776 #else /* not ACCUMULATE_OUTGOING_ARGS */
2777 #ifndef PUSH_ROUNDING
2778 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2779 #endif
2780 #endif
2782 #ifdef PUSH_ARGS_REVERSED
2783 #ifdef PREFERRED_STACK_BOUNDARY
2784 /* If we push args individually in reverse order, perform stack alignment
2785 before the first push (the last arg). */
2786 if (argblock == 0)
2787 anti_adjust_stack (GEN_INT (args_size.constant
2788 - original_args_size.constant));
2789 #endif
2790 #endif
2792 #ifdef PUSH_ARGS_REVERSED
2793 inc = -1;
2794 argnum = nargs - 1;
2795 #else
2796 inc = 1;
2797 argnum = 0;
2798 #endif
2800 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2801 /* The argument list is the property of the called routine and it
2802 may clobber it. If the fixed area has been used for previous
2803 parameters, we must save and restore it.
2805 Here we compute the boundary of the area that needs to be saved, if any. */
2807 #ifdef ARGS_GROW_DOWNWARD
2808 for (count = 0; count < reg_parm_stack_space + 1; count++)
2809 #else
2810 for (count = 0; count < reg_parm_stack_space; count++)
2811 #endif
2813 if (count >= highest_outgoing_arg_in_use
2814 || stack_usage_map[count] == 0)
2815 continue;
2817 if (low_to_save == -1)
2818 low_to_save = count;
2820 high_to_save = count;
2823 if (low_to_save >= 0)
2825 int num_to_save = high_to_save - low_to_save + 1;
2826 enum machine_mode save_mode
2827 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2828 rtx stack_area;
2830 /* If we don't have the required alignment, must do this in BLKmode. */
2831 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2832 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2833 save_mode = BLKmode;
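  /* The mask test above is the usual power-of-two alignment check; with
     made-up numbers, for a 4-byte save_mode an offset of 8 passes
     (8 & 3 == 0) while an offset of 6 forces BLKmode (6 & 3 == 2).  */
#if 0
  {
    int aligned = (8 & (4 - 1)) == 0;	 /* 1                            */
    int misaligned = (6 & (4 - 1)) != 0; /* 1: must fall back to BLKmode */
  }
#endif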
2835 #ifdef ARGS_GROW_DOWNWARD
2836 stack_area = gen_rtx_MEM (save_mode,
2837 memory_address (save_mode,
2838 plus_constant (argblock,
2839 - high_to_save)));
2840 #else
2841 stack_area = gen_rtx_MEM (save_mode,
2842 memory_address (save_mode,
2843 plus_constant (argblock,
2844 low_to_save)));
2845 #endif
2846 if (save_mode == BLKmode)
2848 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2849 emit_block_move (validize_mem (save_area), stack_area,
2850 GEN_INT (num_to_save),
2851 PARM_BOUNDARY / BITS_PER_UNIT);
2853 else
2855 save_area = gen_reg_rtx (save_mode);
2856 emit_move_insn (save_area, stack_area);
2859 #endif
2861 /* Push the args that need to be pushed. */
2863 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2864 are to be pushed. */
2865 for (count = 0; count < nargs; count++, argnum += inc)
2867 register enum machine_mode mode = argvec[argnum].mode;
2868 register rtx val = argvec[argnum].value;
2869 rtx reg = argvec[argnum].reg;
2870 int partial = argvec[argnum].partial;
2871 #ifdef ACCUMULATE_OUTGOING_ARGS
2872 int lower_bound, upper_bound, i;
2873 #endif
2875 if (! (reg != 0 && partial == 0))
2877 #ifdef ACCUMULATE_OUTGOING_ARGS
2878 /* If this is being stored into a pre-allocated, fixed-size, stack
2879 area, save any previous data at that location. */
2881 #ifdef ARGS_GROW_DOWNWARD
2882 /* stack_slot is negative, but we want to index stack_usage_map
2883 with positive values. */
2884 upper_bound = -argvec[argnum].offset.constant + 1;
2885 lower_bound = upper_bound - argvec[argnum].size.constant;
2886 #else
2887 lower_bound = argvec[argnum].offset.constant;
2888 upper_bound = lower_bound + argvec[argnum].size.constant;
2889 #endif
2891 for (i = lower_bound; i < upper_bound; i++)
2892 if (stack_usage_map[i]
2893 /* Don't store things in the fixed argument area at this point;
2894 it has already been saved. */
2895 && i > reg_parm_stack_space)
2896 break;
2898 if (i != upper_bound)
2900 /* We need to make a save area. See what mode we can make it. */
2901 enum machine_mode save_mode
2902 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2903 MODE_INT, 1);
2904 rtx stack_area
2905 = gen_rtx_MEM
2906 (save_mode,
2907 memory_address
2908 (save_mode,
2909 plus_constant (argblock,
2910 argvec[argnum].offset.constant)));
2912 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2913 emit_move_insn (argvec[argnum].save_area, stack_area);
2915 #endif
2916 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2917 argblock, GEN_INT (argvec[argnum].offset.constant),
2918 reg_parm_stack_space);
2920 #ifdef ACCUMULATE_OUTGOING_ARGS
2921 /* Now mark the segment we just used. */
2922 for (i = lower_bound; i < upper_bound; i++)
2923 stack_usage_map[i] = 1;
2924 #endif
2926 NO_DEFER_POP;
2930 #ifndef PUSH_ARGS_REVERSED
2931 #ifdef PREFERRED_STACK_BOUNDARY
2932 /* If we pushed args in forward order, perform stack alignment
2933 after pushing the last arg. */
2934 if (argblock == 0)
2935 anti_adjust_stack (GEN_INT (args_size.constant
2936 - original_args_size.constant));
2937 #endif
2938 #endif
2940 #ifdef PUSH_ARGS_REVERSED
2941 argnum = nargs - 1;
2942 #else
2943 argnum = 0;
2944 #endif
2946 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2948 /* Now load any reg parms into their regs. */
2950 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2951 are to be pushed. */
2952 for (count = 0; count < nargs; count++, argnum += inc)
2954 register rtx val = argvec[argnum].value;
2955 rtx reg = argvec[argnum].reg;
2956 int partial = argvec[argnum].partial;
2958 if (reg != 0 && partial == 0)
2959 emit_move_insn (reg, val);
2960 NO_DEFER_POP;
2963 /* For version 1.37, try deleting this entirely. */
2964 if (! no_queue)
2965 emit_queue ();
2967 /* Any regs containing parms remain in use through the call. */
2968 for (count = 0; count < nargs; count++)
2969 if (argvec[count].reg != 0)
2970 use_reg (&call_fusage, argvec[count].reg);
2972 /* Don't allow popping to be deferred, since then
2973 cse'ing of library calls could delete a call and leave the pop. */
2974 NO_DEFER_POP;
2976 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2977 will set inhibit_defer_pop to that value. */
2979 /* The return type is needed to decide how many bytes the function pops.
2980 Signedness plays no role in that, so for simplicity, we pretend it's
2981 always signed. We also assume that the list of arguments passed has
2982 no impact, so we pretend it is unknown. */
2984 emit_call_1 (fun,
2985 get_identifier (XSTR (orgfun, 0)),
2986 build_function_type (outmode == VOIDmode ? void_type_node
2987 : type_for_mode (outmode, 0), NULL_TREE),
2988 original_args_size.constant, args_size.constant, 0,
2989 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2990 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2991 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2993 pop_temp_slots ();
2995 /* Now restore inhibit_defer_pop to its actual original value. */
2996 OK_DEFER_POP;
2998 #ifdef ACCUMULATE_OUTGOING_ARGS
2999 #ifdef REG_PARM_STACK_SPACE
3000 if (save_area)
3002 enum machine_mode save_mode = GET_MODE (save_area);
3003 #ifdef ARGS_GROW_DOWNWARD
3004 rtx stack_area
3005 = gen_rtx_MEM (save_mode,
3006 memory_address (save_mode,
3007 plus_constant (argblock,
3008 - high_to_save)));
3009 #else
3010 rtx stack_area
3011 = gen_rtx_MEM (save_mode,
3012 memory_address (save_mode,
3013 plus_constant (argblock, low_to_save)));
3014 #endif
3016 if (save_mode != BLKmode)
3017 emit_move_insn (stack_area, save_area);
3018 else
3019 emit_block_move (stack_area, validize_mem (save_area),
3020 GEN_INT (high_to_save - low_to_save + 1),
3021 PARM_BOUNDARY / BITS_PER_UNIT);
3023 #endif
3025 /* If we saved any argument areas, restore them. */
3026 for (count = 0; count < nargs; count++)
3027 if (argvec[count].save_area)
3029 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3030 rtx stack_area
3031 = gen_rtx_MEM (save_mode,
3032 memory_address
3033 (save_mode,
3034 plus_constant (argblock,
3035 argvec[count].offset.constant)));
3037 emit_move_insn (stack_area, argvec[count].save_area);
3040 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3041 stack_usage_map = initial_stack_usage_map;
3042 #endif
3045 /* Like emit_library_call except that an extra argument, VALUE,
3046 comes second and says where to store the result.
3047 (If VALUE is zero, this function chooses a convenient way
3048 to return the value.)
3050 This function returns an rtx for where the value is to be found.
3051 If VALUE is nonzero, VALUE is returned. */
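/* An illustrative use (the SYMBOL_REF and operand registers are
   hypothetical); passing 0 for VALUE lets the function choose where the
   SImode result lives, and the chosen place is returned.  */
#if 0
  rtx result = emit_library_call_value (my_libfunc, NULL_RTX /* value */,
					1 /* no_queue */, SImode,
					2 /* nargs */,
					op0, SImode, op1, SImode);
#endif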
3054 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3055 enum machine_mode outmode, int nargs, ...))
3057 #ifndef ANSI_PROTOTYPES
3058 rtx orgfun;
3059 rtx value;
3060 int no_queue;
3061 enum machine_mode outmode;
3062 int nargs;
3063 #endif
3064 va_list p;
3065 /* Total size in bytes of all the stack-parms scanned so far. */
3066 struct args_size args_size;
3067 /* Size of arguments before any adjustments (such as rounding). */
3068 struct args_size original_args_size;
3069 register int argnum;
3070 rtx fun;
3071 int inc;
3072 int count;
3073 rtx argblock = 0;
3074 CUMULATIVE_ARGS args_so_far;
3075 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3076 struct args_size offset; struct args_size size; rtx save_area; };
3077 struct arg *argvec;
3078 int old_inhibit_defer_pop = inhibit_defer_pop;
3079 rtx call_fusage = 0;
3080 rtx mem_value = 0;
3081 int pcc_struct_value = 0;
3082 int struct_value_size = 0;
3083 int is_const;
3084 int reg_parm_stack_space = 0;
3085 #ifdef ACCUMULATE_OUTGOING_ARGS
3086 int needed;
3087 #endif
3089 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3090 /* Define the boundary of the register parm stack space that needs to be
3091 saved, if any. */
3092 int low_to_save = -1, high_to_save = 0;
3093 rtx save_area = 0; /* Place that it is saved */
3094 #endif
3096 #ifdef ACCUMULATE_OUTGOING_ARGS
3097 /* Size of the stack reserved for parameter registers. */
3098 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3099 char *initial_stack_usage_map = stack_usage_map;
3100 #endif
3102 #ifdef REG_PARM_STACK_SPACE
3103 #ifdef MAYBE_REG_PARM_STACK_SPACE
3104 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3105 #else
3106 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3107 #endif
3108 #endif
3110 VA_START (p, nargs);
3112 #ifndef ANSI_PROTOTYPES
3113 orgfun = va_arg (p, rtx);
3114 value = va_arg (p, rtx);
3115 no_queue = va_arg (p, int);
3116 outmode = va_arg (p, enum machine_mode);
3117 nargs = va_arg (p, int);
3118 #endif
3120 is_const = no_queue;
3121 fun = orgfun;
3123 /* If this kind of value comes back in memory,
3124 decide where in memory it should come back. */
3125 if (aggregate_value_p (type_for_mode (outmode, 0)))
3127 #ifdef PCC_STATIC_STRUCT_RETURN
3128 rtx pointer_reg
3129 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3131 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3132 pcc_struct_value = 1;
3133 if (value == 0)
3134 value = gen_reg_rtx (outmode);
3135 #else /* not PCC_STATIC_STRUCT_RETURN */
3136 struct_value_size = GET_MODE_SIZE (outmode);
3137 if (value != 0 && GET_CODE (value) == MEM)
3138 mem_value = value;
3139 else
3140 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3141 #endif
3143 /* This call returns a big structure. */
3144 is_const = 0;
3147 /* ??? Unfinished: must pass the memory address as an argument. */
3149 /* Copy all the libcall-arguments out of the varargs data
3150 and into a vector ARGVEC.
3152 Compute how to pass each argument. We only support a very small subset
3153 of the full argument passing conventions to limit complexity here since
3154 library functions shouldn't have many args. */
3156 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3157 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3159 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3161 args_size.constant = 0;
3162 args_size.var = 0;
3164 count = 0;
3166 push_temp_slots ();
3168 /* If there's a structure value address to be passed,
3169 either pass it in the special place, or pass it as an extra argument. */
3170 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3172 rtx addr = XEXP (mem_value, 0);
3173 nargs++;
3175 /* Make sure it is a reasonable operand for a move or push insn. */
3176 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3177 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3178 addr = force_operand (addr, NULL_RTX);
3180 argvec[count].value = addr;
3181 argvec[count].mode = Pmode;
3182 argvec[count].partial = 0;
3184 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3185 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3186 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3187 abort ();
3188 #endif
3190 locate_and_pad_parm (Pmode, NULL_TREE,
3191 argvec[count].reg && argvec[count].partial == 0,
3192 NULL_TREE, &args_size, &argvec[count].offset,
3193 &argvec[count].size);
3196 if (argvec[count].reg == 0 || argvec[count].partial != 0
3197 || reg_parm_stack_space > 0)
3198 args_size.constant += argvec[count].size.constant;
3200 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3202 count++;
3205 for (; count < nargs; count++)
3207 rtx val = va_arg (p, rtx);
3208 enum machine_mode mode = va_arg (p, enum machine_mode);
3210 /* We cannot convert the arg value to the mode the library wants here;
3211 must do it earlier where we know the signedness of the arg. */
3212 if (mode == BLKmode
3213 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3214 abort ();
3216 /* On some machines, there's no way to pass a float to a library fcn.
3217 Pass it as a double instead. */
3218 #ifdef LIBGCC_NEEDS_DOUBLE
3219 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3220 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3221 #endif
3223 /* There's no need to call protect_from_queue, because
3224 either emit_move_insn or emit_push_insn will do that. */
3226 /* Make sure it is a reasonable operand for a move or push insn. */
3227 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3228 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3229 val = force_operand (val, NULL_RTX);
3231 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3232 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3234 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3235 be viewed as just an efficiency improvement. */
3236 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3237 emit_move_insn (slot, val);
3238 val = XEXP (slot, 0);
3239 mode = Pmode;
3241 #endif
3243 argvec[count].value = val;
3244 argvec[count].mode = mode;
3246 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3247 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3248 abort ();
3249 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3250 argvec[count].partial
3251 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3252 #else
3253 argvec[count].partial = 0;
3254 #endif
3256 locate_and_pad_parm (mode, NULL_TREE,
3257 argvec[count].reg && argvec[count].partial == 0,
3258 NULL_TREE, &args_size, &argvec[count].offset,
3259 &argvec[count].size);
3261 if (argvec[count].size.var)
3262 abort ();
3264 if (reg_parm_stack_space == 0 && argvec[count].partial)
3265 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3267 if (argvec[count].reg == 0 || argvec[count].partial != 0
3268 || reg_parm_stack_space > 0)
3269 args_size.constant += argvec[count].size.constant;
3271 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3273 va_end (p);
3275 #ifdef FINAL_REG_PARM_STACK_SPACE
3276 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3277 args_size.var);
3278 #endif
3279 /* If this machine requires an external definition for library
3280 functions, write one out. */
3281 assemble_external_libcall (fun);
3283 original_args_size = args_size;
3284 #ifdef PREFERRED_STACK_BOUNDARY
3285 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3286 / STACK_BYTES) * STACK_BYTES);
3287 #endif
3289 args_size.constant = MAX (args_size.constant,
3290 reg_parm_stack_space);
3292 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3293 args_size.constant -= reg_parm_stack_space;
3294 #endif
3296 if (args_size.constant > current_function_outgoing_args_size)
3297 current_function_outgoing_args_size = args_size.constant;
3299 #ifdef ACCUMULATE_OUTGOING_ARGS
3300 /* Since the stack pointer will never be pushed, it is possible for
3301 the evaluation of a parm to clobber something we have already
3302 written to the stack. Since most function calls on RISC machines
3303 do not use the stack, this is uncommon, but must work correctly.
3305 Therefore, we save any area of the stack that was already written
3306 and that we are using. Here we set up to do this by making a new
3307 stack usage map from the old one.
3309 Another approach might be to try to reorder the argument
3310 evaluations to avoid this conflicting stack usage. */
3312 needed = args_size.constant;
3314 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3315 /* Since we will be writing into the entire argument area, the
3316 map must be allocated for its entire size, not just the part that
3317 is the responsibility of the caller. */
3318 needed += reg_parm_stack_space;
3319 #endif
3321 #ifdef ARGS_GROW_DOWNWARD
3322 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3323 needed + 1);
3324 #else
3325 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3326 needed);
3327 #endif
3328 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3330 if (initial_highest_arg_in_use)
3331 bcopy (initial_stack_usage_map, stack_usage_map,
3332 initial_highest_arg_in_use);
3334 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3335 bzero (&stack_usage_map[initial_highest_arg_in_use],
3336 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3337 needed = 0;
3339 /* The address of the outgoing argument list must not be copied to a
3340 register here, because argblock would be left pointing to the
3341 wrong place after the call to allocate_dynamic_stack_space below.  */
3344 argblock = virtual_outgoing_args_rtx;
3345 #else /* not ACCUMULATE_OUTGOING_ARGS */
3346 #ifndef PUSH_ROUNDING
3347 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3348 #endif
3349 #endif
3351 #ifdef PUSH_ARGS_REVERSED
3352 #ifdef PREFERRED_STACK_BOUNDARY
3353 /* If we push args individually in reverse order, perform stack alignment
3354 before the first push (the last arg). */
3355 if (argblock == 0)
3356 anti_adjust_stack (GEN_INT (args_size.constant
3357 - original_args_size.constant));
3358 #endif
3359 #endif
3361 #ifdef PUSH_ARGS_REVERSED
3362 inc = -1;
3363 argnum = nargs - 1;
3364 #else
3365 inc = 1;
3366 argnum = 0;
3367 #endif
3369 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3370 /* The argument list is the property of the called routine and it
3371 may clobber it. If the fixed area has been used for previous
3372 parameters, we must save and restore it.
3374 Here we compute the boundary of the area that needs to be saved, if any. */
3376 #ifdef ARGS_GROW_DOWNWARD
3377 for (count = 0; count < reg_parm_stack_space + 1; count++)
3378 #else
3379 for (count = 0; count < reg_parm_stack_space; count++)
3380 #endif
3382 if (count >= highest_outgoing_arg_in_use
3383 || stack_usage_map[count] == 0)
3384 continue;
3386 if (low_to_save == -1)
3387 low_to_save = count;
3389 high_to_save = count;
3392 if (low_to_save >= 0)
3394 int num_to_save = high_to_save - low_to_save + 1;
3395 enum machine_mode save_mode
3396 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3397 rtx stack_area;
3399 /* If we don't have the required alignment, must do this in BLKmode. */
3400 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3401 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3402 save_mode = BLKmode;
3404 #ifdef ARGS_GROW_DOWNWARD
3405 stack_area = gen_rtx_MEM (save_mode,
3406 memory_address (save_mode,
3407 plus_constant (argblock,
3408 - high_to_save)));
3409 #else
3410 stack_area = gen_rtx_MEM (save_mode,
3411 memory_address (save_mode,
3412 plus_constant (argblock,
3413 low_to_save)));
3414 #endif
3415 if (save_mode == BLKmode)
3417 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3418 emit_block_move (validize_mem (save_area), stack_area,
3419 GEN_INT (num_to_save),
3420 PARM_BOUNDARY / BITS_PER_UNIT);
3422 else
3424 save_area = gen_reg_rtx (save_mode);
3425 emit_move_insn (save_area, stack_area);
3428 #endif
3430 /* Push the args that need to be pushed. */
3432 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3433 are to be pushed. */
3434 for (count = 0; count < nargs; count++, argnum += inc)
3436 register enum machine_mode mode = argvec[argnum].mode;
3437 register rtx val = argvec[argnum].value;
3438 rtx reg = argvec[argnum].reg;
3439 int partial = argvec[argnum].partial;
3440 #ifdef ACCUMULATE_OUTGOING_ARGS
3441 int lower_bound, upper_bound, i;
3442 #endif
3444 if (! (reg != 0 && partial == 0))
3446 #ifdef ACCUMULATE_OUTGOING_ARGS
3447 /* If this is being stored into a pre-allocated, fixed-size, stack
3448 area, save any previous data at that location. */
3450 #ifdef ARGS_GROW_DOWNWARD
3451 /* The argument's offset from ARGBLOCK is negative, but we want to index
3452 stack_usage_map with positive values. */
3453 upper_bound = -argvec[argnum].offset.constant + 1;
3454 lower_bound = upper_bound - argvec[argnum].size.constant;
3455 #else
3456 lower_bound = argvec[argnum].offset.constant;
3457 upper_bound = lower_bound + argvec[argnum].size.constant;
3458 #endif
3460 for (i = lower_bound; i < upper_bound; i++)
3461 if (stack_usage_map[i]
3462 /* Don't store things in the fixed argument area at this point;
3463 it has already been saved. */
3464 && i > reg_parm_stack_space)
3465 break;
3467 if (i != upper_bound)
3469 /* We need to make a save area. See what mode we can make it. */
3470 enum machine_mode save_mode
3471 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3472 MODE_INT, 1);
3473 rtx stack_area
3474 = gen_rtx_MEM
3475 (save_mode,
3476 memory_address
3477 (save_mode,
3478 plus_constant (argblock,
3479 argvec[argnum].offset.constant)));
3480 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3482 emit_move_insn (argvec[argnum].save_area, stack_area);
3484 #endif
3485 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3486 argblock, GEN_INT (argvec[argnum].offset.constant),
3487 reg_parm_stack_space);
3489 #ifdef ACCUMULATE_OUTGOING_ARGS
3490 /* Now mark the segment we just used. */
3491 for (i = lower_bound; i < upper_bound; i++)
3492 stack_usage_map[i] = 1;
3493 #endif
3495 NO_DEFER_POP;
3499 #ifndef PUSH_ARGS_REVERSED
3500 #ifdef PREFERRED_STACK_BOUNDARY
3501 /* If we pushed args in forward order, perform stack alignment
3502 after pushing the last arg. */
3503 if (argblock == 0)
3504 anti_adjust_stack (GEN_INT (args_size.constant
3505 - original_args_size.constant));
3506 #endif
3507 #endif
3509 #ifdef PUSH_ARGS_REVERSED
3510 argnum = nargs - 1;
3511 #else
3512 argnum = 0;
3513 #endif
3515 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3517 /* Now load any reg parms into their regs. */
3519 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3520 are to be pushed. */
3521 for (count = 0; count < nargs; count++, argnum += inc)
3523 register rtx val = argvec[argnum].value;
3524 rtx reg = argvec[argnum].reg;
3525 int partial = argvec[argnum].partial;
3527 if (reg != 0 && partial == 0)
3528 emit_move_insn (reg, val);
3529 NO_DEFER_POP;
3532 #if 0
3533 /* For version 1.37, try deleting this entirely. */
3534 if (! no_queue)
3535 emit_queue ();
3536 #endif
3538 /* Any regs containing parms remain in use through the call. */
3539 for (count = 0; count < nargs; count++)
3540 if (argvec[count].reg != 0)
3541 use_reg (&call_fusage, argvec[count].reg);
3543 /* Pass the function the address in which to return a structure value. */
3544 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3546 emit_move_insn (struct_value_rtx,
3547 force_reg (Pmode,
3548 force_operand (XEXP (mem_value, 0),
3549 NULL_RTX)));
3550 if (GET_CODE (struct_value_rtx) == REG)
3551 use_reg (&call_fusage, struct_value_rtx);
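/* Recording STRUCT_VALUE_RTX in CALL_FUSAGE marks the register as used by
   the call, so the move just emitted into it is not deleted as dead.  */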
3554 /* Don't allow popping to be deferred, since then
3555 cse'ing of library calls could delete a call and leave the pop. */
3556 NO_DEFER_POP;
3558 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3559 will set inhibit_defer_pop to that value. */
3560 /* See the comment in emit_library_call about the function type we build
3561 and pass here. */
3563 emit_call_1 (fun,
3564 get_identifier (XSTR (orgfun, 0)),
3565 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3566 original_args_size.constant, args_size.constant,
3567 struct_value_size,
3568 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3569 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3570 old_inhibit_defer_pop + 1, call_fusage, is_const);
3572 /* Now restore inhibit_defer_pop to its actual original value. */
3573 OK_DEFER_POP;
3575 pop_temp_slots ();
3577 /* Copy the value to the right place. */
3578 if (outmode != VOIDmode)
3580 if (mem_value)
3582 if (value == 0)
3583 value = mem_value;
3584 if (value != mem_value)
3585 emit_move_insn (value, mem_value);
3587 else if (value != 0)
3588 emit_move_insn (value, hard_libcall_value (outmode));
3589 else
3590 value = hard_libcall_value (outmode);
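/* When no target was supplied and the result is not returned in memory,
   VALUE is simply the hard register picked by hard_libcall_value.  */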
3593 #ifdef ACCUMULATE_OUTGOING_ARGS
3594 #ifdef REG_PARM_STACK_SPACE
3595 if (save_area)
3597 enum machine_mode save_mode = GET_MODE (save_area);
3598 #ifdef ARGS_GROW_DOWNWARD
3599 rtx stack_area
3600 = gen_rtx_MEM (save_mode,
3601 memory_address (save_mode,
3602 plus_constant (argblock,
3603 - high_to_save)));
3604 #else
3605 rtx stack_area
3606 = gen_rtx_MEM (save_mode,
3607 memory_address (save_mode,
3608 plus_constant (argblock, low_to_save)));
3609 #endif
3610 if (save_mode != BLKmode)
3611 emit_move_insn (stack_area, save_area);
3612 else
3613 emit_block_move (stack_area, validize_mem (save_area),
3614 GEN_INT (high_to_save - low_to_save + 1),
3615 PARM_BOUNDARY / BITS_PER_UNIT);
3617 #endif
3619 /* If we saved any argument areas, restore them. */
3620 for (count = 0; count < nargs; count++)
3621 if (argvec[count].save_area)
3623 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3624 rtx stack_area
3625 = gen_rtx_MEM (save_mode,
3626 memory_address
3627 (save_mode,
3628 plus_constant (argblock,
3629 argvec[count].offset.constant)));
3631 emit_move_insn (stack_area, argvec[count].save_area);
3634 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3635 stack_usage_map = initial_stack_usage_map;
3636 #endif
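/* Restoring the map and the high-water mark leaves the outgoing-argument
   bookkeeping exactly as it was on entry, so an enclosing call expansion in
   progress is not confused by the slots this libcall used.  */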
3638 return value;
3641 #if 0
3642 /* Return an rtx which represents a suitable home on the stack
3643 given TYPE, the type of the argument looking for a home.
3644 This is called only for BLKmode arguments.
3646 SIZE is the size needed for this target.
3647 ARGS_ADDR is the address of the bottom of the argument block for this call.
3648 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3649 if this machine uses push insns. */
3651 static rtx
3652 target_for_arg (type, size, args_addr, offset)
3653 tree type;
3654 rtx size;
3655 rtx args_addr;
3656 struct args_size offset;
3658 rtx target;
3659 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3661 /* We do not call memory_address if possible,
3662 because we want to address as close to the stack
3663 as possible. For non-variable sized arguments,
3664 this will be stack-pointer relative addressing. */
3665 if (GET_CODE (offset_rtx) == CONST_INT)
3666 target = plus_constant (args_addr, INTVAL (offset_rtx));
3667 else
3669 /* I have no idea how to guarantee that this
3670 will work in the presence of register parameters. */
3671 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3672 target = memory_address (QImode, target);
3675 return gen_rtx_MEM (BLKmode, target);
3677 #endif
3679 /* Store a single argument for a function call
3680 into the register or memory area where it must be passed.
3681 *ARG describes the argument value and where to pass it.
3683 ARGBLOCK is the address of the stack-block for all the arguments,
3684 or 0 on a machine where arguments are pushed individually.
3686 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3687 so we must be careful about how the stack is used.
3689 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3690 argument area. This is used, if ACCUMULATE_OUTGOING_ARGS is defined, to
3691 indicate that we need not worry about saving and restoring the stack.
3693 REG_PARM_STACK_SPACE is the size in bytes of the area reserved for register arguments, if any. */
3695 static void
3696 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3697 reg_parm_stack_space)
3698 struct arg_data *arg;
3699 rtx argblock;
3700 int may_be_alloca;
3701 int variable_size ATTRIBUTE_UNUSED;
3702 int reg_parm_stack_space;
3704 register tree pval = arg->tree_value;
3705 rtx reg = 0;
3706 int partial = 0;
3707 int used = 0;
3708 #ifdef ACCUMULATE_OUTGOING_ARGS
3709 int i, lower_bound = 0, upper_bound = 0;
3710 #endif
3712 if (TREE_CODE (pval) == ERROR_MARK)
3713 return;
3715 /* Push a new temporary level for any temporaries we make for
3716 this argument. */
3717 push_temp_slots ();
3719 #ifdef ACCUMULATE_OUTGOING_ARGS
3720 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3721 save any previous data at that location. */
3722 if (argblock && ! variable_size && arg->stack)
3724 #ifdef ARGS_GROW_DOWNWARD
3725 /* The offset in the stack slot's address is negative, but we want to
3726 index stack_usage_map with positive values. */
3727 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3728 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3729 else
3730 upper_bound = 0;
3732 lower_bound = upper_bound - arg->size.constant;
3733 #else
3734 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3735 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3736 else
3737 lower_bound = 0;
3739 upper_bound = lower_bound + arg->size.constant;
3740 #endif
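/* For example, with ARGS_GROW_DOWNWARD, a slot at (argblock - 16) of size 8
   gives UPPER_BOUND == 17 and LOWER_BOUND == 9, so map entries 9 through 16
   are examined below.  */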
3742 for (i = lower_bound; i < upper_bound; i++)
3743 if (stack_usage_map[i]
3744 /* Don't store things in the fixed argument area at this point;
3745 it has already been saved. */
3746 && i > reg_parm_stack_space)
3747 break;
3749 if (i != upper_bound)
3751 /* We need to make a save area. See what mode we can make it. */
3752 enum machine_mode save_mode
3753 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3754 rtx stack_area
3755 = gen_rtx_MEM (save_mode,
3756 memory_address (save_mode,
3757 XEXP (arg->stack_slot, 0)));
3759 if (save_mode == BLKmode)
3761 arg->save_area = assign_stack_temp (BLKmode,
3762 arg->size.constant, 0);
3763 MEM_SET_IN_STRUCT_P (arg->save_area,
3764 AGGREGATE_TYPE_P (TREE_TYPE
3765 (arg->tree_value)));
3766 preserve_temp_slots (arg->save_area);
3767 emit_block_move (validize_mem (arg->save_area), stack_area,
3768 GEN_INT (arg->size.constant),
3769 PARM_BOUNDARY / BITS_PER_UNIT);
3771 else
3773 arg->save_area = gen_reg_rtx (save_mode);
3774 emit_move_insn (arg->save_area, stack_area);
3779 /* Now that we have saved any slots that will be overwritten by this
3780 store, mark all slots this store will use. We must do this before
3781 we actually expand the argument since the expansion itself may
3782 trigger library calls which might need to use the same stack slot. */
3783 if (argblock && ! variable_size && arg->stack)
3784 for (i = lower_bound; i < upper_bound; i++)
3785 stack_usage_map[i] = 1;
3786 #endif
3788 /* If this isn't going to be placed on both the stack and in registers,
3789 set up the register and number of words. */
3790 if (! arg->pass_on_stack)
3791 reg = arg->reg, partial = arg->partial;
3793 if (reg != 0 && partial == 0)
3794 /* Being passed entirely in a register. We shouldn't be called in
3795 this case. */
3796 abort ();
3798 /* If this arg needs special alignment, don't load the registers
3799 here. */
3800 if (arg->n_aligned_regs != 0)
3801 reg = 0;
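/* For such an argument the registers are loaded from arg->aligned_regs when
   the register parameters are set up for the call, so REG is cleared here to
   keep emit_push_insn from also storing into the register.  */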
3803 /* If this is being passed partially in a register, we can't evaluate
3804 it directly into its stack slot. Otherwise, we can. */
3805 if (arg->value == 0)
3807 #ifdef ACCUMULATE_OUTGOING_ARGS
3808 /* stack_arg_under_construction is nonzero if a function argument is
3809 being evaluated directly into the outgoing argument list and
3810 expand_call must take special action to preserve the argument list
3811 if it is called recursively.
3813 For scalar function arguments stack_usage_map is sufficient to
3814 determine which stack slots must be saved and restored. Scalar
3815 arguments in general have pass_on_stack == 0.
3817 If this argument is initialized by a function which takes the
3818 address of the argument (a C++ constructor or a C function
3819 returning a BLKmode structure), then stack_usage_map is
3820 insufficient and expand_call must push the stack around the
3821 function call. Such arguments have pass_on_stack == 1.
3823 Note that it is always safe to set stack_arg_under_construction,
3824 but this generates suboptimal code if set when not needed. */
3826 if (arg->pass_on_stack)
3827 stack_arg_under_construction++;
3828 #endif
3829 arg->value = expand_expr (pval,
3830 (partial
3831 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3832 ? NULL_RTX : arg->stack,
3833 VOIDmode, 0);
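/* The stack slot is used as the expansion target only when the entire
   argument lands there in its final mode; if part goes in registers or a
   mode conversion is still needed, the value must be computed elsewhere
   first.  */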
3835 /* If the mode doesn't agree (because we are promoting the object, or for
3836 any other reason), convert the mode. */
3838 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3839 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3840 arg->value, arg->unsignedp);
3842 #ifdef ACCUMULATE_OUTGOING_ARGS
3843 if (arg->pass_on_stack)
3844 stack_arg_under_construction--;
3845 #endif
3848 /* Don't allow anything left on stack from computation
3849 of argument to alloca. */
3850 if (may_be_alloca)
3851 do_pending_stack_adjust ();
3853 if (arg->value == arg->stack)
3855 /* If the value is already in the stack slot, we are done. */
3856 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3858 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3859 XEXP (arg->stack, 0), Pmode,
3860 ARGS_SIZE_RTX (arg->size),
3861 TYPE_MODE (sizetype),
3862 GEN_INT (MEMORY_USE_RW),
3863 TYPE_MODE (integer_type_node));
3866 else if (arg->mode != BLKmode)
3868 register int size;
3870 /* Argument is a scalar, not entirely passed in registers.
3871 (If part is passed in registers, arg->partial says how much
3872 and emit_push_insn will take care of putting it there.)
3874 Push it, and if its size is less than the
3875 amount of space allocated to it,
3876 also bump stack pointer by the additional space.
3877 Note that in C the default argument promotions
3878 will prevent such mismatches. */
3880 size = GET_MODE_SIZE (arg->mode);
3881 /* Compute how much space the push instruction will push.
3882 On many machines, pushing a byte will advance the stack
3883 pointer by a halfword. */
3884 #ifdef PUSH_ROUNDING
3885 size = PUSH_ROUNDING (size);
3886 #endif
3887 used = size;
3889 /* Compute how much space the argument should get:
3890 round up to a multiple of the alignment for arguments. */
3891 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3892 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3893 / (PARM_BOUNDARY / BITS_PER_UNIT))
3894 * (PARM_BOUNDARY / BITS_PER_UNIT));
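/* For instance, if PUSH_ROUNDING leaves SIZE at 1 and PARM_BOUNDARY is 32
   bits, USED becomes 4 and emit_push_insn below is told about the
   USED - SIZE == 3 bytes of padding.  */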
3896 /* This isn't already where we want it on the stack, so put it there.
3897 This can either be done with push or copy insns. */
3898 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3899 partial, reg, used - size, argblock,
3900 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3902 else
3904 /* BLKmode, at least partly to be pushed. */
3906 register int excess;
3907 rtx size_rtx;
3909 /* Pushing a nonscalar.
3910 If part is passed in registers, PARTIAL says how much
3911 and emit_push_insn will take care of putting it there. */
3913 /* Round its size up to a multiple
3914 of the allocation unit for arguments. */
3916 if (arg->size.var != 0)
3918 excess = 0;
3919 size_rtx = ARGS_SIZE_RTX (arg->size);
3921 else
3923 /* PUSH_ROUNDING has no effect on us, because
3924 emit_push_insn for BLKmode is careful to avoid it. */
3925 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3926 + partial * UNITS_PER_WORD);
3927 size_rtx = expr_size (pval);
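/* EXCESS is how much larger the rounded stack slot is than the value itself;
   the PARTIAL * UNITS_PER_WORD term compensates for any part of the argument
   that is passed in registers and so not counted in arg->size.  */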
3930 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3931 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3932 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3933 reg_parm_stack_space);
3937 /* Unless this is a partially-in-register argument, the argument is now
3938 in the stack.
3940 ??? Note that this can change arg->value from arg->stack to
3941 arg->stack_slot and it matters when they are not the same.
3942 It isn't totally clear that this is correct in all cases. */
3943 if (partial == 0)
3944 arg->value = arg->stack_slot;
3946 /* Once we have pushed something, pops can't safely
3947 be deferred during the rest of the arguments. */
3948 NO_DEFER_POP;
3950 /* ANSI doesn't require a sequence point here,
3951 but PCC has one, so this will avoid some problems. */
3952 emit_queue ();
3954 /* Free any temporary slots made in processing this argument. Show
3955 that we might have taken the address of something and pushed that
3956 as an operand. */
3957 preserve_temp_slots (NULL_RTX);
3958 free_temp_slots ();
3959 pop_temp_slots ();