1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20 #include "config.h"
21 #include "rtl.h"
22 #include "tree.h"
23 #include "flags.h"
24 #include "expr.h"
25 #include "gvarargs.h"
26 #include "insn-flags.h"
28 /* Decide whether a function's arguments should be processed
29 from first to last or from last to first.
31 They should if the stack and args grow in opposite directions, but
32 only if we have push insns. */
34 #ifdef PUSH_ROUNDING
36 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
37 #define PUSH_ARGS_REVERSED /* If it's last to first */
38 #endif
40 #endif
42 /* Like STACK_BOUNDARY but in units of bytes, not bits. */
43 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
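/* For illustration only -- the numbers below are hypothetical, not taken
   from any particular target: with STACK_BOUNDARY == 64 and
   BITS_PER_UNIT == 8, STACK_BYTES is 8, and the rounding applied to
   args_size.constant further down in expand_call,

	args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
			       / STACK_BYTES) * STACK_BYTES);

   turns a 13-byte argument block into a 16-byte one, while a size that is
   already a multiple of STACK_BYTES is left unchanged.  */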
45 /* Data structure and subroutines used within expand_call. */
47 struct arg_data
49 /* Tree node for this argument. */
50 tree tree_value;
51 /* Mode for value; TYPE_MODE unless promoted. */
52 enum machine_mode mode;
53 /* Current RTL value for argument, or 0 if it isn't precomputed. */
54 rtx value;
 55 /* Initially-computed RTL value for argument; only for const functions. */
56 rtx initial_value;
57 /* Register to pass this argument in, 0 if passed on stack, or an
58 EXPR_LIST if the arg is to be copied into multiple different
59 registers. */
60 rtx reg;
61 /* If REG was promoted from the actual mode of the argument expression,
62 indicates whether the promotion is sign- or zero-extended. */
63 int unsignedp;
64 /* Number of registers to use. 0 means put the whole arg in registers.
65 Also 0 if not passed in registers. */
66 int partial;
67 /* Non-zero if argument must be passed on stack.
68 Note that some arguments may be passed on the stack
69 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
70 pass_on_stack identifies arguments that *cannot* go in registers. */
71 int pass_on_stack;
72 /* Offset of this argument from beginning of stack-args. */
73 struct args_size offset;
74 /* Similar, but offset to the start of the stack slot. Different from
75 OFFSET if this arg pads downward. */
76 struct args_size slot_offset;
 77 /* Size of this argument on the stack, rounded up for any padding it gets;
78 parts of the argument passed in registers do not count.
79 If REG_PARM_STACK_SPACE is defined, then register parms
80 are counted here as well. */
81 struct args_size size;
82 /* Location on the stack at which parameter should be stored. The store
83 has already been done if STACK == VALUE. */
84 rtx stack;
85 /* Location on the stack of the start of this argument slot. This can
86 differ from STACK if this arg pads downward. This location is known
87 to be aligned to FUNCTION_ARG_BOUNDARY. */
88 rtx stack_slot;
89 #ifdef ACCUMULATE_OUTGOING_ARGS
90 /* Place that this stack area has been saved, if needed. */
91 rtx save_area;
92 #endif
93 #ifdef STRICT_ALIGNMENT
94 /* If an argument's alignment does not permit direct copying into registers,
95 copy in smaller-sized pieces into pseudos. These are stored in a
96 block pointed to by this field. The next field says how many
97 word-sized pseudos we made. */
98 rtx *aligned_regs;
99 int n_aligned_regs;
100 #endif
103 #ifdef ACCUMULATE_OUTGOING_ARGS
 104 /* A vector of one char per byte of stack space. A byte is non-zero if
105 the corresponding stack location has been used.
106 This vector is used to prevent a function call within an argument from
107 clobbering any stack already set up. */
108 static char *stack_usage_map;
110 /* Size of STACK_USAGE_MAP. */
111 static int highest_outgoing_arg_in_use;
113 /* stack_arg_under_construction is nonzero when an argument may be
114 initialized with a constructor call (including a C function that
115 returns a BLKmode struct) and expand_call must take special action
116 to make sure the object being constructed does not overlap the
117 argument list for the constructor call. */
118 int stack_arg_under_construction;
119 #endif
121 static int calls_function PROTO((tree, int));
122 static int calls_function_1 PROTO((tree, int));
123 static void emit_call_1 PROTO((rtx, tree, int, int, rtx, rtx, int,
124 rtx, int));
125 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
126 tree, int));
128 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
129 `alloca'.
131 If WHICH is 0, return 1 if EXP contains a call to any function.
 132 Actually, we only need to return 1 if evaluating EXP would require pushing
133 arguments on the stack, but that is too difficult to compute, so we just
134 assume any function call might require the stack. */
136 static tree calls_function_save_exprs;
138 static int
139 calls_function (exp, which)
140 tree exp;
141 int which;
143 int val;
144 calls_function_save_exprs = 0;
145 val = calls_function_1 (exp, which);
146 calls_function_save_exprs = 0;
147 return val;
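/* A sketch of how this predicate is used by expand_call below:

	if (calls_function (args[i].tree_value, 1))
	  ...  the argument might call alloca, so precompute its value  ...

	if (calls_function (args[i].tree_value, 0))
	  ...  the argument contains some call, which might store args
	       on the stack and clobber ones already stored there  ...

   As noted above, the WHICH == 0 answer is deliberately conservative:
   any call at all is assumed to need the stack.  */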
150 static int
151 calls_function_1 (exp, which)
152 tree exp;
153 int which;
155 register int i;
156 int type = TREE_CODE_CLASS (TREE_CODE (exp));
157 int length = tree_code_length[(int) TREE_CODE (exp)];
159 /* Only expressions and references can contain calls. */
161 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
162 && type != 'b')
163 return 0;
165 switch (TREE_CODE (exp))
167 case CALL_EXPR:
168 if (which == 0)
169 return 1;
170 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
171 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
172 == FUNCTION_DECL)
173 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
174 && (DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
175 == BUILT_IN_ALLOCA))
176 return 1;
178 /* Third operand is RTL. */
179 length = 2;
180 break;
182 case SAVE_EXPR:
183 if (SAVE_EXPR_RTL (exp) != 0)
184 return 0;
185 if (value_member (exp, calls_function_save_exprs))
186 return 0;
187 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
188 calls_function_save_exprs);
189 return (TREE_OPERAND (exp, 0) != 0
190 && calls_function_1 (TREE_OPERAND (exp, 0), which));
192 case BLOCK:
194 register tree local;
196 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
197 if (DECL_INITIAL (local) != 0
198 && calls_function_1 (DECL_INITIAL (local), which))
199 return 1;
202 register tree subblock;
204 for (subblock = BLOCK_SUBBLOCKS (exp);
205 subblock;
206 subblock = TREE_CHAIN (subblock))
207 if (calls_function_1 (subblock, which))
208 return 1;
210 return 0;
212 case METHOD_CALL_EXPR:
213 length = 3;
214 break;
216 case WITH_CLEANUP_EXPR:
217 length = 1;
218 break;
220 case RTL_EXPR:
221 return 0;
224 for (i = 0; i < length; i++)
225 if (TREE_OPERAND (exp, i) != 0
226 && calls_function_1 (TREE_OPERAND (exp, i), which))
227 return 1;
229 return 0;
232 /* Force FUNEXP into a form suitable for the address of a CALL,
233 and return that as an rtx. Also load the static chain register
234 if FNDECL is a nested function.
236 USE_INSNS points to a variable holding a chain of USE insns
237 to which a USE of the static chain
238 register should be added, if required. */
241 prepare_call_address (funexp, fndecl, use_insns)
242 rtx funexp;
243 tree fndecl;
244 rtx *use_insns;
246 rtx static_chain_value = 0;
248 funexp = protect_from_queue (funexp, 0);
250 if (fndecl != 0)
251 /* Get possible static chain value for nested function in C. */
252 static_chain_value = lookup_static_chain (fndecl);
254 /* Make a valid memory address and copy constants thru pseudo-regs,
255 but not for a constant address if -fno-function-cse. */
256 if (GET_CODE (funexp) != SYMBOL_REF)
257 funexp = memory_address (FUNCTION_MODE, funexp);
258 else
260 #ifndef NO_FUNCTION_CSE
261 if (optimize && ! flag_no_function_cse)
262 #ifdef NO_RECURSIVE_FUNCTION_CSE
263 if (fndecl != current_function_decl)
264 #endif
265 funexp = force_reg (Pmode, funexp);
266 #endif
269 if (static_chain_value != 0)
271 emit_move_insn (static_chain_rtx, static_chain_value);
273 /* Put the USE insn in the chain we were passed. It will later be
274 output immediately in front of the CALL insn. */
275 push_to_sequence (*use_insns);
276 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
277 *use_insns = get_insns ();
278 end_sequence ();
281 return funexp;
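/* The intended calling pattern, as used near the end of expand_call
   (shown here only as a sketch):

	rtx use_insns = 0;
	...
	funexp = prepare_call_address (funexp, fndecl, &use_insns);
	emit_call_1 (funexp, funtype, args_size.constant, struct_value_size,
		     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
		     valreg, old_inhibit_defer_pop, use_insns, is_const);

   Any USE of the static chain register ends up in USE_INSNS and is emitted
   immediately before the CALL insn by emit_call_1.  */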
284 /* Generate instructions to call function FUNEXP,
285 and optionally pop the results.
286 The CALL_INSN is the first insn generated.
288 FUNTYPE is the data type of the function, or, for a library call,
289 the identifier for the name of the call. This is given to the
290 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
292 STACK_SIZE is the number of bytes of arguments on the stack,
293 rounded up to STACK_BOUNDARY; zero if the size is variable.
294 This is both to put into the call insn and
295 to generate explicit popping code if necessary.
297 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
298 It is zero if this call doesn't want a structure value.
300 NEXT_ARG_REG is the rtx that results from executing
301 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
302 just after all the args have had their registers assigned.
303 This could be whatever you like, but normally it is the first
304 arg-register beyond those used for args in this call,
305 or 0 if all the arg-registers are used in this call.
306 It is passed on to `gen_call' so you can put this info in the call insn.
308 VALREG is a hard register in which a value is returned,
309 or 0 if the call does not return a value.
311 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
312 the args to this call were processed.
313 We restore `inhibit_defer_pop' to that value.
315 USE_INSNS is a chain of USE insns to be emitted immediately before
316 the actual CALL insn.
318 IS_CONST is true if this is a `const' call. */
320 static void
321 emit_call_1 (funexp, funtype, stack_size, struct_value_size, next_arg_reg,
322 valreg, old_inhibit_defer_pop, use_insns, is_const)
323 rtx funexp;
324 tree funtype;
325 int stack_size;
326 int struct_value_size;
327 rtx next_arg_reg;
328 rtx valreg;
329 int old_inhibit_defer_pop;
330 rtx use_insns;
331 int is_const;
333 rtx stack_size_rtx = GEN_INT (stack_size);
334 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
335 rtx call_insn;
336 int already_popped = 0;
338 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
339 and we don't want to load it into a register as an optimization,
340 because prepare_call_address already did it if it should be done. */
341 if (GET_CODE (funexp) != SYMBOL_REF)
342 funexp = memory_address (FUNCTION_MODE, funexp);
344 #ifndef ACCUMULATE_OUTGOING_ARGS
345 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
346 if (HAVE_call_pop && HAVE_call_value_pop
347 && (RETURN_POPS_ARGS (funtype, stack_size) > 0 || stack_size == 0))
349 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (funtype, stack_size));
350 rtx pat;
352 /* If this subroutine pops its own args, record that in the call insn
353 if possible, for the sake of frame pointer elimination. */
354 if (valreg)
355 pat = gen_call_value_pop (valreg,
356 gen_rtx (MEM, FUNCTION_MODE, funexp),
357 stack_size_rtx, next_arg_reg, n_pop);
358 else
359 pat = gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, funexp),
360 stack_size_rtx, next_arg_reg, n_pop);
362 emit_call_insn (pat);
363 already_popped = 1;
365 else
366 #endif
367 #endif
369 #if defined (HAVE_call) && defined (HAVE_call_value)
370 if (HAVE_call && HAVE_call_value)
372 if (valreg)
373 emit_call_insn (gen_call_value (valreg,
374 gen_rtx (MEM, FUNCTION_MODE, funexp),
375 stack_size_rtx, next_arg_reg,
376 NULL_RTX));
377 else
378 emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, funexp),
379 stack_size_rtx, next_arg_reg,
380 struct_value_size_rtx));
382 else
383 #endif
384 abort ();
386 /* Find the CALL insn we just emitted and write the USE insns before it. */
387 for (call_insn = get_last_insn ();
388 call_insn && GET_CODE (call_insn) != CALL_INSN;
389 call_insn = PREV_INSN (call_insn))
392 if (! call_insn)
393 abort ();
395 /* Put the USE insns before the CALL. */
396 emit_insns_before (use_insns, call_insn);
398 /* If this is a const call, then set the insn's unchanging bit. */
399 if (is_const)
400 CONST_CALL_P (call_insn) = 1;
402 /* Restore this now, so that we do defer pops for this call's args
403 if the context of the call as a whole permits. */
404 inhibit_defer_pop = old_inhibit_defer_pop;
406 #ifndef ACCUMULATE_OUTGOING_ARGS
407 /* If returning from the subroutine does not automatically pop the args,
408 we need an instruction to pop them sooner or later.
409 Perhaps do it now; perhaps just record how much space to pop later.
411 If returning from the subroutine does pop the args, indicate that the
412 stack pointer will be changed. */
414 if (stack_size != 0 && RETURN_POPS_ARGS (funtype, stack_size) > 0)
416 if (!already_popped)
417 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
418 stack_size -= RETURN_POPS_ARGS (funtype, stack_size);
419 stack_size_rtx = GEN_INT (stack_size);
422 if (stack_size != 0)
424 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
425 pending_stack_adjust += stack_size;
426 else
427 adjust_stack (stack_size_rtx);
429 #endif
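/* A worked example of the popping logic above, with purely illustrative
   numbers: suppose 16 bytes of arguments were pushed and RETURN_POPS_ARGS
   says the callee pops all 16.  Then stack_size drops to 0 and no explicit
   pop is emitted; at most a CLOBBER of the stack pointer is emitted to
   record that sp changes across the call.  If instead the callee pops
   nothing, stack_size stays 16 and is either folded into
   pending_stack_adjust (when flag_defer_pop is set, pops are not inhibited
   and the call is not const) or popped immediately with adjust_stack.  */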
432 /* Generate all the code for a function call
433 and return an rtx for its value.
434 Store the value in TARGET (specified as an rtx) if convenient.
435 If the value is stored in TARGET then TARGET is returned.
436 If IGNORE is nonzero, then we ignore the value of the function call. */
439 expand_call (exp, target, ignore)
440 tree exp;
441 rtx target;
442 int ignore;
444 /* List of actual parameters. */
445 tree actparms = TREE_OPERAND (exp, 1);
446 /* RTX for the function to be called. */
447 rtx funexp;
448 /* Tree node for the function to be called (not the address!). */
449 tree funtree;
450 /* Data type of the function. */
451 tree funtype;
452 /* Declaration of the function being called,
453 or 0 if the function is computed (not known by name). */
454 tree fndecl = 0;
455 char *name = 0;
457 /* Register in which non-BLKmode value will be returned,
458 or 0 if no value or if value is BLKmode. */
459 rtx valreg;
460 /* Address where we should return a BLKmode value;
461 0 if value not BLKmode. */
462 rtx structure_value_addr = 0;
463 /* Nonzero if that address is being passed by treating it as
464 an extra, implicit first parameter. Otherwise,
465 it is passed by being copied directly into struct_value_rtx. */
466 int structure_value_addr_parm = 0;
467 /* Size of aggregate value wanted, or zero if none wanted
468 or if we are using the non-reentrant PCC calling convention
469 or expecting the value in registers. */
470 int struct_value_size = 0;
471 /* Nonzero if called function returns an aggregate in memory PCC style,
472 by returning the address of where to find it. */
473 int pcc_struct_value = 0;
475 /* Number of actual parameters in this call, including struct value addr. */
476 int num_actuals;
477 /* Number of named args. Args after this are anonymous ones
478 and they must all go on the stack. */
479 int n_named_args;
480 /* Count arg position in order args appear. */
481 int argpos;
483 /* Vector of information about each argument.
484 Arguments are numbered in the order they will be pushed,
485 not the order they are written. */
486 struct arg_data *args;
488 /* Total size in bytes of all the stack-parms scanned so far. */
489 struct args_size args_size;
490 /* Size of arguments before any adjustments (such as rounding). */
491 struct args_size original_args_size;
492 /* Data on reg parms scanned so far. */
493 CUMULATIVE_ARGS args_so_far;
494 /* Nonzero if a reg parm has been scanned. */
495 int reg_parm_seen;
496 /* Nonzero if this is an indirect function call. */
497 int current_call_is_indirect = 0;
499 /* Nonzero if we must avoid push-insns in the args for this call.
500 If stack space is allocated for register parameters, but not by the
501 caller, then it is preallocated in the fixed part of the stack frame.
502 So the entire argument block must then be preallocated (i.e., we
503 ignore PUSH_ROUNDING in that case). */
505 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
506 int must_preallocate = 1;
507 #else
508 #ifdef PUSH_ROUNDING
509 int must_preallocate = 0;
510 #else
511 int must_preallocate = 1;
512 #endif
513 #endif
515 /* Size of the stack reserved for parameter registers. */
516 int reg_parm_stack_space = 0;
518 /* 1 if scanning parms front to back, -1 if scanning back to front. */
519 int inc;
520 /* Address of space preallocated for stack parms
521 (on machines that lack push insns), or 0 if space not preallocated. */
522 rtx argblock = 0;
524 /* Nonzero if it is plausible that this is a call to alloca. */
525 int may_be_alloca;
526 /* Nonzero if this is a call to setjmp or a related function. */
527 int returns_twice;
528 /* Nonzero if this is a call to `longjmp'. */
529 int is_longjmp;
530 /* Nonzero if this is a call to an inline function. */
531 int is_integrable = 0;
532 /* Nonzero if this is a call to a `const' function.
533 Note that only explicitly named functions are handled as `const' here. */
534 int is_const = 0;
535 /* Nonzero if this is a call to a `volatile' function. */
536 int is_volatile = 0;
537 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
538 /* Define the boundary of the register parm stack space that needs to be
 539 saved, if any. */
540 int low_to_save = -1, high_to_save;
541 rtx save_area = 0; /* Place that it is saved */
542 #endif
544 #ifdef ACCUMULATE_OUTGOING_ARGS
545 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
546 char *initial_stack_usage_map = stack_usage_map;
547 #endif
549 rtx old_stack_level = 0;
550 int old_pending_adj;
551 int old_stack_arg_under_construction;
552 int old_inhibit_defer_pop = inhibit_defer_pop;
553 tree old_cleanups = cleanups_this_call;
555 rtx use_insns = 0;
557 register tree p;
558 register int i, j;
560 /* See if we can find a DECL-node for the actual function.
561 As a result, decide whether this is a call to an integrable function. */
563 p = TREE_OPERAND (exp, 0);
564 if (TREE_CODE (p) == ADDR_EXPR)
566 fndecl = TREE_OPERAND (p, 0);
567 if (TREE_CODE (fndecl) != FUNCTION_DECL)
569 /* May still be a `const' function if it is
570 a call through a pointer-to-const.
571 But we don't handle that. */
572 fndecl = 0;
574 else
576 if (!flag_no_inline
577 && fndecl != current_function_decl
578 && DECL_SAVED_INSNS (fndecl))
579 is_integrable = 1;
580 else if (! TREE_ADDRESSABLE (fndecl))
582 /* In case this function later becomes inlinable,
583 record that there was already a non-inline call to it.
585 Use abstraction instead of setting TREE_ADDRESSABLE
586 directly. */
587 if (DECL_INLINE (fndecl) && extra_warnings && warn_inline
588 && !flag_no_inline)
589 warning_with_decl (fndecl, "can't inline call to `%s' which was declared inline");
590 mark_addressable (fndecl);
593 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
594 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
595 is_const = 1;
597 if (TREE_THIS_VOLATILE (fndecl))
598 is_volatile = 1;
602 #ifdef REG_PARM_STACK_SPACE
603 #ifdef MAYBE_REG_PARM_STACK_SPACE
604 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
605 #else
606 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
607 #endif
608 #endif
610 /* Warn if this value is an aggregate type,
611 regardless of which calling convention we are using for it. */
612 if (warn_aggregate_return
613 && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
614 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
615 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
616 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE))
617 warning ("function call has aggregate value");
619 /* Set up a place to return a structure. */
621 /* Cater to broken compilers. */
622 if (aggregate_value_p (exp))
624 /* This call returns a big structure. */
625 is_const = 0;
627 #ifdef PCC_STATIC_STRUCT_RETURN
629 pcc_struct_value = 1;
630 is_integrable = 0; /* Easier than making that case work right. */
632 #else /* not PCC_STATIC_STRUCT_RETURN */
634 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
636 if (struct_value_size < 0)
637 abort ();
639 if (target && GET_CODE (target) == MEM)
640 structure_value_addr = XEXP (target, 0);
641 else
643 /* Assign a temporary on the stack to hold the value. */
645 /* For variable-sized objects, we must be called with a target
646 specified. If we were to allocate space on the stack here,
647 we would have no way of knowing when to free it. */
649 structure_value_addr
650 = XEXP (assign_stack_temp (BLKmode, struct_value_size, 1), 0);
651 target = 0;
654 #endif /* not PCC_STATIC_STRUCT_RETURN */
657 /* If called function is inline, try to integrate it. */
659 if (is_integrable)
661 rtx temp;
662 rtx before_call = get_last_insn ();
664 temp = expand_inline_function (fndecl, actparms, target,
665 ignore, TREE_TYPE (exp),
666 structure_value_addr);
668 /* If inlining succeeded, return. */
669 if ((HOST_WIDE_INT) temp != -1)
671 /* Perform all cleanups needed for the arguments of this call
672 (i.e. destructors in C++). It is ok if these destructors
673 clobber RETURN_VALUE_REG, because the only time we care about
674 this is when TARGET is that register. But in C++, we take
675 care to never return that register directly. */
676 expand_cleanups_to (old_cleanups);
678 #ifdef ACCUMULATE_OUTGOING_ARGS
679 /* If the outgoing argument list must be preserved, push
680 the stack before executing the inlined function if it
681 makes any calls. */
683 for (i = reg_parm_stack_space - 1; i >= 0; i--)
684 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
685 break;
687 if (stack_arg_under_construction || i >= 0)
689 rtx insn = NEXT_INSN (before_call), seq;
691 /* Look for a call in the inline function code.
692 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
693 nonzero then there is a call and it is not necessary
694 to scan the insns. */
696 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
697 for (; insn; insn = NEXT_INSN (insn))
698 if (GET_CODE (insn) == CALL_INSN)
699 break;
701 if (insn)
703 /* Reserve enough stack space so that the largest
704 argument list of any function call in the inline
705 function does not overlap the argument list being
706 evaluated. This is usually an overestimate because
707 allocate_dynamic_stack_space reserves space for an
708 outgoing argument list in addition to the requested
709 space, but there is no way to ask for stack space such
710 that an argument list of a certain length can be
711 safely constructed. */
713 int adjust = OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl));
714 #ifdef REG_PARM_STACK_SPACE
715 /* Add the stack space reserved for register arguments
716 in the inline function. What is really needed is the
717 largest value of reg_parm_stack_space in the inline
718 function, but that is not available. Using the current
719 value of reg_parm_stack_space is wrong, but gives
720 correct results on all supported machines. */
721 adjust += reg_parm_stack_space;
722 #endif
723 start_sequence ();
724 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
725 allocate_dynamic_stack_space (GEN_INT (adjust),
726 NULL_RTX, BITS_PER_UNIT);
727 seq = get_insns ();
728 end_sequence ();
729 emit_insns_before (seq, NEXT_INSN (before_call));
730 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
733 #endif
735 /* If the result is equivalent to TARGET, return TARGET to simplify
736 checks in store_expr. They can be equivalent but not equal in the
737 case of a function that returns BLKmode. */
738 if (temp != target && rtx_equal_p (temp, target))
739 return target;
740 return temp;
743 /* If inlining failed, mark FNDECL as needing to be compiled
744 separately after all. */
745 mark_addressable (fndecl);
748 /* When calling a const function, we must pop the stack args right away,
749 so that the pop is deleted or moved with the call. */
750 if (is_const)
751 NO_DEFER_POP;
753 function_call_count++;
755 if (fndecl && DECL_NAME (fndecl))
756 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
758 /* On some machines (such as the PA) indirect calls have a different
759 calling convention than normal calls. FUNCTION_ARG in the target
760 description can look at current_call_is_indirect to determine which
761 calling convention to use. */
762 current_call_is_indirect = (fndecl == 0);
763 #if 0
764 = TREE_CODE (TREE_OPERAND (exp, 0)) == NON_LVALUE_EXPR ? 1 : 0;
765 #endif
767 #if 0
768 /* Unless it's a call to a specific function that isn't alloca,
769 if it has one argument, we must assume it might be alloca. */
771 may_be_alloca =
772 (!(fndecl != 0 && strcmp (name, "alloca"))
773 && actparms != 0
774 && TREE_CHAIN (actparms) == 0);
775 #else
776 /* We assume that alloca will always be called by name. It
777 makes no sense to pass it as a pointer-to-function to
778 anything that does not understand its behavior. */
779 may_be_alloca =
780 (name && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
781 && name[0] == 'a'
782 && ! strcmp (name, "alloca"))
783 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
784 && name[0] == '_'
785 && ! strcmp (name, "__builtin_alloca"))));
786 #endif
788 /* See if this is a call to a function that can return more than once
789 or a call to longjmp. */
791 returns_twice = 0;
792 is_longjmp = 0;
794 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 15)
796 char *tname = name;
798 if (name[0] == '_')
799 tname += ((name[1] == '_' && name[2] == 'x') ? 3 : 1);
801 if (tname[0] == 's')
803 returns_twice
804 = ((tname[1] == 'e'
805 && (! strcmp (tname, "setjmp")
806 || ! strcmp (tname, "setjmp_syscall")))
807 || (tname[1] == 'i'
808 && ! strcmp (tname, "sigsetjmp"))
809 || (tname[1] == 'a'
810 && ! strcmp (tname, "savectx")));
811 if (tname[1] == 'i'
812 && ! strcmp (tname, "siglongjmp"))
813 is_longjmp = 1;
815 else if ((tname[0] == 'q' && tname[1] == 's'
816 && ! strcmp (tname, "qsetjmp"))
817 || (tname[0] == 'v' && tname[1] == 'f'
818 && ! strcmp (tname, "vfork")))
819 returns_twice = 1;
821 else if (tname[0] == 'l' && tname[1] == 'o'
822 && ! strcmp (tname, "longjmp"))
823 is_longjmp = 1;
826 if (may_be_alloca)
827 current_function_calls_alloca = 1;
829 /* Don't let pending stack adjusts add up to too much.
830 Also, do all pending adjustments now
831 if there is any chance this might be a call to alloca. */
833 if (pending_stack_adjust >= 32
834 || (pending_stack_adjust > 0 && may_be_alloca))
835 do_pending_stack_adjust ();
837 /* Operand 0 is a pointer-to-function; get the type of the function. */
838 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
839 if (TREE_CODE (funtype) != POINTER_TYPE)
840 abort ();
841 funtype = TREE_TYPE (funtype);
843 /* Push the temporary stack slot level so that we can free temporaries used
844 by each of the arguments separately. */
845 push_temp_slots ();
847 /* Start updating where the next arg would go. */
848 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX);
850 /* If struct_value_rtx is 0, it means pass the address
851 as if it were an extra parameter. */
852 if (structure_value_addr && struct_value_rtx == 0)
854 #ifdef ACCUMULATE_OUTGOING_ARGS
855 /* If the stack will be adjusted, make sure the structure address
856 does not refer to virtual_outgoing_args_rtx. */
857 rtx temp = (stack_arg_under_construction
858 ? copy_addr_to_reg (structure_value_addr)
859 : force_reg (Pmode, structure_value_addr));
860 #else
861 rtx temp = force_reg (Pmode, structure_value_addr);
862 #endif
864 actparms
865 = tree_cons (error_mark_node,
866 make_tree (build_pointer_type (TREE_TYPE (funtype)),
867 temp),
868 actparms);
869 structure_value_addr_parm = 1;
872 /* Count the arguments and set NUM_ACTUALS. */
873 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
874 num_actuals = i;
876 /* Compute number of named args.
877 Normally, don't include the last named arg if anonymous args follow.
878 (If no anonymous args follow, the result of list_length
879 is actually one too large.)
881 If SETUP_INCOMING_VARARGS is defined, this machine will be able to
882 place unnamed args that were passed in registers into the stack. So
 883 treat all args as named. This allows the insns emitted for a specific
884 argument list to be independent of the function declaration.
886 If SETUP_INCOMING_VARARGS is not defined, we do not have any reliable
887 way to pass unnamed args in registers, so we must force them into
888 memory. */
889 #ifndef SETUP_INCOMING_VARARGS
890 if (TYPE_ARG_TYPES (funtype) != 0)
891 n_named_args
892 = list_length (TYPE_ARG_TYPES (funtype)) - 1
893 /* Count the struct value address, if it is passed as a parm. */
894 + structure_value_addr_parm;
895 else
896 #endif
897 /* If we know nothing, treat all args as named. */
898 n_named_args = num_actuals;
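/* Worked example, assuming the usual tree representation in which the
   TYPE_ARG_TYPES list of a fixed-arity prototype ends in void_type_node
   while that of a varargs prototype does not: for `int g (int, double)'
   the list is (int, double, void), list_length returns 3 and n_named_args
   becomes 2; for `int f (int, double, ...)' the list is (int, double),
   list_length returns 2 and n_named_args becomes 1, so the last named
   argument is handled like the anonymous ones that follow it.
   structure_value_addr_parm adds one more in either case when the struct
   return address is passed as an ordinary parameter.  */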
900 /* Make a vector to hold all the information about each arg. */
901 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
902 bzero (args, num_actuals * sizeof (struct arg_data));
904 args_size.constant = 0;
905 args_size.var = 0;
907 /* In this loop, we consider args in the order they are written.
 908 We fill up ARGS from the front or from the back if necessary
909 so that in any case the first arg to be pushed ends up at the front. */
911 #ifdef PUSH_ARGS_REVERSED
912 i = num_actuals - 1, inc = -1;
913 /* In this case, must reverse order of args
914 so that we compute and push the last arg first. */
915 #else
916 i = 0, inc = 1;
917 #endif
919 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
920 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
922 tree type = TREE_TYPE (TREE_VALUE (p));
923 enum machine_mode mode;
925 args[i].tree_value = TREE_VALUE (p);
927 /* Replace erroneous argument with constant zero. */
928 if (type == error_mark_node || TYPE_SIZE (type) == 0)
929 args[i].tree_value = integer_zero_node, type = integer_type_node;
931 /* Decide where to pass this arg.
933 args[i].reg is nonzero if all or part is passed in registers.
935 args[i].partial is nonzero if part but not all is passed in registers,
936 and the exact value says how many words are passed in registers.
938 args[i].pass_on_stack is nonzero if the argument must at least be
939 computed on the stack. It may then be loaded back into registers
940 if args[i].reg is nonzero.
942 These decisions are driven by the FUNCTION_... macros and must agree
943 with those made by function.c. */
945 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
946 /* See if this argument should be passed by invisible reference. */
947 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, TYPE_MODE (type), type,
948 argpos < n_named_args))
950 #ifdef FUNCTION_ARG_CALLEE_COPIES
951 if (FUNCTION_ARG_CALLEE_COPIES (args_so_far, TYPE_MODE (type), type,
952 argpos < n_named_args)
953 /* If it's in a register, we must make a copy of it too. */
954 /* ??? Is this a sufficient test? Is there a better one? */
955 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
956 && REG_P (DECL_RTL (args[i].tree_value))))
958 args[i].tree_value = build1 (ADDR_EXPR,
959 build_pointer_type (type),
960 args[i].tree_value);
961 type = build_pointer_type (type);
963 else
964 #endif
966 /* We make a copy of the object and pass the address to the
967 function being called. */
968 rtx copy;
970 if (TYPE_SIZE (type) == 0
971 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
973 /* This is a variable-sized object. Make space on the stack
974 for it. */
975 rtx size_rtx = expr_size (TREE_VALUE (p));
977 if (old_stack_level == 0)
979 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
980 old_pending_adj = pending_stack_adjust;
981 pending_stack_adjust = 0;
984 copy = gen_rtx (MEM, BLKmode,
985 allocate_dynamic_stack_space (size_rtx,
986 NULL_RTX,
987 TYPE_ALIGN (type)));
989 else
991 int size = int_size_in_bytes (type);
992 copy = assign_stack_temp (TYPE_MODE (type), size, 1);
995 store_expr (args[i].tree_value, copy, 0);
997 args[i].tree_value = build1 (ADDR_EXPR,
998 build_pointer_type (type),
999 make_tree (type, copy));
1000 type = build_pointer_type (type);
1003 #endif /* FUNCTION_ARG_PASS_BY_REFERENCE */
1005 mode = TYPE_MODE (type);
1007 #ifdef PROMOTE_FUNCTION_ARGS
 1008 /* Compute the mode to which the arg is actually to be extended. */
1009 if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE
1010 || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE
1011 || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE
1012 || TREE_CODE (type) == OFFSET_TYPE)
1014 int unsignedp = TREE_UNSIGNED (type);
1015 PROMOTE_MODE (mode, unsignedp, type);
1016 args[i].unsignedp = unsignedp;
1018 #endif
1020 args[i].mode = mode;
1021 args[i].reg = FUNCTION_ARG (args_so_far, mode, type,
1022 argpos < n_named_args);
1023 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1024 if (args[i].reg)
1025 args[i].partial
1026 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, type,
1027 argpos < n_named_args);
1028 #endif
1030 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1032 /* If FUNCTION_ARG returned an (expr_list (nil) FOO), it means that
1033 we are to pass this arg in the register(s) designated by FOO, but
1034 also to pass it in the stack. */
1035 if (args[i].reg && GET_CODE (args[i].reg) == EXPR_LIST
1036 && XEXP (args[i].reg, 0) == 0)
1037 args[i].pass_on_stack = 1, args[i].reg = XEXP (args[i].reg, 1);
1039 /* If this is an addressable type, we must preallocate the stack
1040 since we must evaluate the object into its final location.
1042 If this is to be passed in both registers and the stack, it is simpler
1043 to preallocate. */
1044 if (TREE_ADDRESSABLE (type)
1045 || (args[i].pass_on_stack && args[i].reg != 0))
1046 must_preallocate = 1;
1048 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1049 we cannot consider this function call constant. */
1050 if (TREE_ADDRESSABLE (type))
1051 is_const = 0;
1053 /* Compute the stack-size of this argument. */
1054 if (args[i].reg == 0 || args[i].partial != 0
1055 #ifdef REG_PARM_STACK_SPACE
1056 || reg_parm_stack_space > 0
1057 #endif
1058 || args[i].pass_on_stack)
1059 locate_and_pad_parm (mode, type,
1060 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1062 #else
1063 args[i].reg != 0,
1064 #endif
1065 fndecl, &args_size, &args[i].offset,
1066 &args[i].size);
1068 #ifndef ARGS_GROW_DOWNWARD
1069 args[i].slot_offset = args_size;
1070 #endif
1072 #ifndef REG_PARM_STACK_SPACE
1073 /* If a part of the arg was put into registers,
1074 don't include that part in the amount pushed. */
1075 if (! args[i].pass_on_stack)
1076 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1077 / (PARM_BOUNDARY / BITS_PER_UNIT)
1078 * (PARM_BOUNDARY / BITS_PER_UNIT));
1079 #endif
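/* Illustrative arithmetic (hypothetical target values): with
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 32, an argument with
   args[i].partial == 3 has 3 * 4 = 12 bytes already in registers, and
   those 12 bytes are subtracted from size.constant so they are not pushed
   again.  The divide-then-multiply by PARM_BOUNDARY / BITS_PER_UNIT rounds
   the register portion down to a multiple of the parameter boundary before
   subtracting it.  */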
1081 /* Update ARGS_SIZE, the total stack space for args so far. */
1083 args_size.constant += args[i].size.constant;
1084 if (args[i].size.var)
1086 ADD_PARM_SIZE (args_size, args[i].size.var);
1089 /* Since the slot offset points to the bottom of the slot,
1090 we must record it after incrementing if the args grow down. */
1091 #ifdef ARGS_GROW_DOWNWARD
1092 args[i].slot_offset = args_size;
1094 args[i].slot_offset.constant = -args_size.constant;
1095 if (args_size.var)
1097 SUB_PARM_SIZE (args[i].slot_offset, args_size.var);
1099 #endif
1101 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1102 have been used, etc. */
1104 FUNCTION_ARG_ADVANCE (args_so_far, TYPE_MODE (type), type,
1105 argpos < n_named_args);
1108 #ifdef FINAL_REG_PARM_STACK_SPACE
1109 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1110 args_size.var);
1111 #endif
1113 /* Compute the actual size of the argument block required. The variable
1114 and constant sizes must be combined, the size may have to be rounded,
1115 and there may be a minimum required size. */
1117 original_args_size = args_size;
1118 if (args_size.var)
1120 /* If this function requires a variable-sized argument list, don't try to
1121 make a cse'able block for this call. We may be able to do this
1122 eventually, but it is too complicated to keep track of what insns go
1123 in the cse'able block and which don't. */
1125 is_const = 0;
1126 must_preallocate = 1;
1128 args_size.var = ARGS_SIZE_TREE (args_size);
1129 args_size.constant = 0;
1131 #ifdef STACK_BOUNDARY
1132 if (STACK_BOUNDARY != BITS_PER_UNIT)
1133 args_size.var = round_up (args_size.var, STACK_BYTES);
1134 #endif
1136 #ifdef REG_PARM_STACK_SPACE
1137 if (reg_parm_stack_space > 0)
1139 args_size.var
1140 = size_binop (MAX_EXPR, args_size.var,
1141 size_int (REG_PARM_STACK_SPACE (fndecl)));
1143 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1144 /* The area corresponding to register parameters is not to count in
1145 the size of the block we need. So make the adjustment. */
1146 args_size.var
1147 = size_binop (MINUS_EXPR, args_size.var,
1148 size_int (reg_parm_stack_space));
1149 #endif
1151 #endif
1153 else
1155 #ifdef STACK_BOUNDARY
1156 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
1157 / STACK_BYTES) * STACK_BYTES);
1158 #endif
1160 #ifdef REG_PARM_STACK_SPACE
1161 args_size.constant = MAX (args_size.constant,
1162 reg_parm_stack_space);
1163 #ifdef MAYBE_REG_PARM_STACK_SPACE
1164 if (reg_parm_stack_space == 0)
1165 args_size.constant = 0;
1166 #endif
1167 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1168 args_size.constant -= reg_parm_stack_space;
1169 #endif
1170 #endif
1173 /* See if we have or want to preallocate stack space.
1175 If we would have to push a partially-in-regs parm
1176 before other stack parms, preallocate stack space instead.
1178 If the size of some parm is not a multiple of the required stack
1179 alignment, we must preallocate.
1181 If the total size of arguments that would otherwise create a copy in
1182 a temporary (such as a CALL) is more than half the total argument list
1183 size, preallocation is faster.
1185 Another reason to preallocate is if we have a machine (like the m88k)
1186 where stack alignment is required to be maintained between every
1187 pair of insns, not just when the call is made. However, we assume here
1188 that such machines either do not have push insns (and hence preallocation
1189 would occur anyway) or the problem is taken care of with
1190 PUSH_ROUNDING. */
1192 if (! must_preallocate)
1194 int partial_seen = 0;
1195 int copy_to_evaluate_size = 0;
1197 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1199 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1200 partial_seen = 1;
1201 else if (partial_seen && args[i].reg == 0)
1202 must_preallocate = 1;
1204 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1205 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1206 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1207 || TREE_CODE (args[i].tree_value) == COND_EXPR
1208 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1209 copy_to_evaluate_size
1210 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1213 if (copy_to_evaluate_size * 2 >= args_size.constant
1214 && args_size.constant > 0)
1215 must_preallocate = 1;
1218 /* If the structure value address will reference the stack pointer, we must
1219 stabilize it. We don't need to do this if we know that we are not going
1220 to adjust the stack pointer in processing this call. */
1222 if (structure_value_addr
1223 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1224 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1225 && (args_size.var
1226 #ifndef ACCUMULATE_OUTGOING_ARGS
1227 || args_size.constant
1228 #endif
1230 structure_value_addr = copy_to_reg (structure_value_addr);
1232 /* If this function call is cse'able, precompute all the parameters.
1233 Note that if the parameter is constructed into a temporary, this will
1234 cause an additional copy because the parameter will be constructed
1235 into a temporary location and then copied into the outgoing arguments.
1236 If a parameter contains a call to alloca and this function uses the
1237 stack, precompute the parameter. */
1239 /* If we preallocated the stack space, and some arguments must be passed
1240 on the stack, then we must precompute any parameter which contains a
1241 function call which will store arguments on the stack.
1242 Otherwise, evaluating the parameter may clobber previous parameters
1243 which have already been stored into the stack. */
1245 for (i = 0; i < num_actuals; i++)
1246 if (is_const
1247 || ((args_size.var != 0 || args_size.constant != 0)
1248 && calls_function (args[i].tree_value, 1))
1249 || (must_preallocate && (args_size.var != 0 || args_size.constant != 0)
1250 && calls_function (args[i].tree_value, 0)))
1252 args[i].initial_value = args[i].value
1253 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
 1255 if (GET_MODE (args[i].value) != VOIDmode
1256 && GET_MODE (args[i].value) != args[i].mode)
1257 args[i].value = convert_to_mode (args[i].mode, args[i].value,
1258 args[i].unsignedp);
1259 preserve_temp_slots (args[i].value);
1261 free_temp_slots ();
1263 /* ANSI doesn't require a sequence point here,
1264 but PCC has one, so this will avoid some problems. */
1265 emit_queue ();
1268 /* Now we are about to start emitting insns that can be deleted
1269 if a libcall is deleted. */
1270 if (is_const)
1271 start_sequence ();
1273 /* If we have no actual push instructions, or shouldn't use them,
1274 make space for all args right now. */
1276 if (args_size.var != 0)
1278 if (old_stack_level == 0)
1280 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1281 old_pending_adj = pending_stack_adjust;
1282 pending_stack_adjust = 0;
1283 #ifdef ACCUMULATE_OUTGOING_ARGS
1284 /* stack_arg_under_construction says whether a stack arg is
1285 being constructed at the old stack level. Pushing the stack
1286 gets a clean outgoing argument block. */
1287 old_stack_arg_under_construction = stack_arg_under_construction;
1288 stack_arg_under_construction = 0;
1289 #endif
1291 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
1293 else if (must_preallocate)
1295 /* Note that we must go through the motions of allocating an argument
1296 block even if the size is zero because we may be storing args
1297 in the area reserved for register arguments, which may be part of
1298 the stack frame. */
1299 int needed = args_size.constant;
1301 #ifdef ACCUMULATE_OUTGOING_ARGS
1302 /* Store the maximum argument space used. It will be pushed by the
1303 prologue.
1305 Since the stack pointer will never be pushed, it is possible for
1306 the evaluation of a parm to clobber something we have already
1307 written to the stack. Since most function calls on RISC machines
1308 do not use the stack, this is uncommon, but must work correctly.
1310 Therefore, we save any area of the stack that was already written
1311 and that we are using. Here we set up to do this by making a new
1312 stack usage map from the old one. The actual save will be done
1313 by store_one_arg.
1315 Another approach might be to try to reorder the argument
1316 evaluations to avoid this conflicting stack usage. */
1318 if (needed > current_function_outgoing_args_size)
1319 current_function_outgoing_args_size = needed;
1321 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1322 /* Since we will be writing into the entire argument area, the
1323 map must be allocated for its entire size, not just the part that
1324 is the responsibility of the caller. */
1325 needed += reg_parm_stack_space;
1326 #endif
1328 #ifdef ARGS_GROW_DOWNWARD
1329 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
1330 needed + 1);
1331 #else
1332 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use, needed);
1333 #endif
1334 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
1336 if (initial_highest_arg_in_use)
1337 bcopy (initial_stack_usage_map, stack_usage_map,
1338 initial_highest_arg_in_use);
1340 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
1341 bzero (&stack_usage_map[initial_highest_arg_in_use],
1342 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
1343 needed = 0;
1345 /* The address of the outgoing argument list must not be copied to a
1346 register here, because argblock would be left pointing to the
1347 wrong place after the call to allocate_dynamic_stack_space below. */
1349 argblock = virtual_outgoing_args_rtx;
1351 #else /* not ACCUMULATE_OUTGOING_ARGS */
1352 if (inhibit_defer_pop == 0)
1354 /* Try to reuse some or all of the pending_stack_adjust
1355 to get this space. Maybe we can avoid any pushing. */
1356 if (needed > pending_stack_adjust)
1358 needed -= pending_stack_adjust;
1359 pending_stack_adjust = 0;
1361 else
1363 pending_stack_adjust -= needed;
1364 needed = 0;
1367 /* Special case this because overhead of `push_block' in this
1368 case is non-trivial. */
1369 if (needed == 0)
1370 argblock = virtual_outgoing_args_rtx;
1371 else
1372 argblock = push_block (GEN_INT (needed), 0, 0);
1374 /* We only really need to call `copy_to_reg' in the case where push
1375 insns are going to be used to pass ARGBLOCK to a function
1376 call in ARGS. In that case, the stack pointer changes value
1377 from the allocation point to the call point, and hence
1378 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
1379 But might as well always do it. */
1380 argblock = copy_to_reg (argblock);
1381 #endif /* not ACCUMULATE_OUTGOING_ARGS */
1385 #ifdef ACCUMULATE_OUTGOING_ARGS
1386 /* The save/restore code in store_one_arg handles all cases except one:
1387 a constructor call (including a C function returning a BLKmode struct)
1388 to initialize an argument. */
1389 if (stack_arg_under_construction)
1391 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1392 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
1393 #else
1394 rtx push_size = GEN_INT (args_size.constant);
1395 #endif
1396 if (old_stack_level == 0)
1398 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1399 old_pending_adj = pending_stack_adjust;
1400 pending_stack_adjust = 0;
1401 /* stack_arg_under_construction says whether a stack arg is
1402 being constructed at the old stack level. Pushing the stack
1403 gets a clean outgoing argument block. */
1404 old_stack_arg_under_construction = stack_arg_under_construction;
1405 stack_arg_under_construction = 0;
1406 /* Make a new map for the new argument list. */
1407 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
1408 bzero (stack_usage_map, highest_outgoing_arg_in_use);
1409 highest_outgoing_arg_in_use = 0;
1411 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
1413 /* If argument evaluation might modify the stack pointer, copy the
1414 address of the argument list to a register. */
1415 for (i = 0; i < num_actuals; i++)
1416 if (args[i].pass_on_stack)
1418 argblock = copy_addr_to_reg (argblock);
1419 break;
1421 #endif
1424 /* If we preallocated stack space, compute the address of each argument.
1425 We need not ensure it is a valid memory address here; it will be
1426 validized when it is used. */
1427 if (argblock)
1429 rtx arg_reg = argblock;
1430 int arg_offset = 0;
1432 if (GET_CODE (argblock) == PLUS)
1433 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1435 for (i = 0; i < num_actuals; i++)
1437 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1438 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1439 rtx addr;
1441 /* Skip this parm if it will not be passed on the stack. */
1442 if (! args[i].pass_on_stack && args[i].reg != 0)
1443 continue;
1445 if (GET_CODE (offset) == CONST_INT)
1446 addr = plus_constant (arg_reg, INTVAL (offset));
1447 else
1448 addr = gen_rtx (PLUS, Pmode, arg_reg, offset);
1450 addr = plus_constant (addr, arg_offset);
1451 args[i].stack = gen_rtx (MEM, args[i].mode, addr);
1453 if (GET_CODE (slot_offset) == CONST_INT)
1454 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1455 else
1456 addr = gen_rtx (PLUS, Pmode, arg_reg, slot_offset);
1458 addr = plus_constant (addr, arg_offset);
1459 args[i].stack_slot = gen_rtx (MEM, args[i].mode, addr);
1463 #ifdef PUSH_ARGS_REVERSED
1464 #ifdef STACK_BOUNDARY
1465 /* If we push args individually in reverse order, perform stack alignment
1466 before the first push (the last arg). */
1467 if (argblock == 0)
1468 anti_adjust_stack (GEN_INT (args_size.constant
1469 - original_args_size.constant));
1470 #endif
1471 #endif
1473 /* Don't try to defer pops if preallocating, not even from the first arg,
1474 since ARGBLOCK probably refers to the SP. */
1475 if (argblock)
1476 NO_DEFER_POP;
1478 /* Get the function to call, in the form of RTL. */
1479 if (fndecl)
1481 /* If this is the first use of the function, see if we need to
1482 make an external definition for it. */
1483 if (! TREE_USED (fndecl))
1485 assemble_external (fndecl);
1486 TREE_USED (fndecl) = 1;
1489 /* Get a SYMBOL_REF rtx for the function address. */
1490 funexp = XEXP (DECL_RTL (fndecl), 0);
1492 else
1493 /* Generate an rtx (probably a pseudo-register) for the address. */
1495 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1496 free_temp_slots (); /* FUNEXP can't be BLKmode */
1497 emit_queue ();
1500 /* Figure out the register where the value, if any, will come back. */
1501 valreg = 0;
1502 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
1503 && ! structure_value_addr)
1505 if (pcc_struct_value)
1506 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
1507 fndecl);
1508 else
1509 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
1512 /* Precompute all register parameters. It isn't safe to compute anything
1513 once we have started filling any specific hard regs. */
1514 reg_parm_seen = 0;
1515 for (i = 0; i < num_actuals; i++)
1516 if (args[i].reg != 0 && ! args[i].pass_on_stack)
1518 reg_parm_seen = 1;
1520 if (args[i].value == 0)
1522 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
1523 VOIDmode, 0);
1524 preserve_temp_slots (args[i].value);
1525 free_temp_slots ();
1527 /* ANSI doesn't require a sequence point here,
1528 but PCC has one, so this will avoid some problems. */
1529 emit_queue ();
1532 /* If we are to promote the function arg to a wider mode,
1533 do it now. */
1535 if (GET_MODE (args[i].value) != VOIDmode
1536 && GET_MODE (args[i].value) != args[i].mode)
1537 args[i].value = convert_to_mode (args[i].mode, args[i].value,
1538 args[i].unsignedp);
1541 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1542 /* The argument list is the property of the called routine and it
1543 may clobber it. If the fixed area has been used for previous
1544 parameters, we must save and restore it.
 1546 Here we compute the boundary of the area that needs to be saved, if any. */
1548 #ifdef ARGS_GROW_DOWNWARD
1549 for (i = 0; i < reg_parm_stack_space + 1; i++)
1550 #else
1551 for (i = 0; i < reg_parm_stack_space; i++)
1552 #endif
1554 if (i >= highest_outgoing_arg_in_use
1555 || stack_usage_map[i] == 0)
1556 continue;
1558 if (low_to_save == -1)
1559 low_to_save = i;
1561 high_to_save = i;
1564 if (low_to_save >= 0)
1566 int num_to_save = high_to_save - low_to_save + 1;
1567 enum machine_mode save_mode
1568 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
1569 rtx stack_area;
1571 /* If we don't have the required alignment, must do this in BLKmode. */
1572 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
1573 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
1574 save_mode = BLKmode;
1576 stack_area = gen_rtx (MEM, save_mode,
1577 memory_address (save_mode,
1579 #ifdef ARGS_GROW_DOWNWARD
1580 plus_constant (argblock,
1581 - high_to_save)
1582 #else
1583 plus_constant (argblock,
1584 low_to_save)
1585 #endif
1587 if (save_mode == BLKmode)
1589 save_area = assign_stack_temp (BLKmode, num_to_save, 1);
1590 emit_block_move (validize_mem (save_area), stack_area,
1591 GEN_INT (num_to_save),
1592 PARM_BOUNDARY / BITS_PER_UNIT);
1594 else
1596 save_area = gen_reg_rtx (save_mode);
1597 emit_move_insn (save_area, stack_area);
1600 #endif
1603 /* Now store (and compute if necessary) all non-register parms.
1604 These come before register parms, since they can require block-moves,
1605 which could clobber the registers used for register parms.
1606 Parms which have partial registers are not stored here,
1607 but we do preallocate space here if they want that. */
1609 for (i = 0; i < num_actuals; i++)
1610 if (args[i].reg == 0 || args[i].pass_on_stack)
1611 store_one_arg (&args[i], argblock, may_be_alloca,
1612 args_size.var != 0, fndecl, reg_parm_stack_space);
1614 #ifdef STRICT_ALIGNMENT
1615 /* If we have a parm that is passed in registers but not in memory
1616 and whose alignment does not permit a direct copy into registers,
1617 make a group of pseudos that correspond to each register that we
1618 will later fill. */
1620 for (i = 0; i < num_actuals; i++)
1621 if (args[i].reg != 0 && ! args[i].pass_on_stack
1622 && args[i].mode == BLKmode
1623 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1624 < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1626 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1627 int big_endian_correction = 0;
1629 args[i].n_aligned_regs
1630 = args[i].partial ? args[i].partial
1631 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1633 args[i].aligned_regs = (rtx *) alloca (sizeof (rtx)
1634 * args[i].n_aligned_regs);
 1636 /* Structures smaller than a word are aligned to the least significant
1637 byte (to the right). On a BYTES_BIG_ENDIAN machine, this means we
1638 must skip the empty high order bytes when calculating the bit
1639 offset. */
1640 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
1641 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
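/* Example with hypothetical values: on a big-endian machine with
   BITS_PER_WORD == 32 and BITS_PER_UNIT == 8, a 3-byte structure gives
   big_endian_correction = 32 - 24 = 8, so the loop below stores the first
   piece at bit offset 8 (xbitpos) rather than 0, skipping the one empty
   high-order byte of the word.  */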
1643 for (j = 0; j < args[i].n_aligned_regs; j++)
1645 rtx reg = gen_reg_rtx (word_mode);
1646 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1647 int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
1648 int bitpos;
1650 args[i].aligned_regs[j] = reg;
1652 /* Clobber REG and move each partword into it. Ensure we don't
1653 go past the end of the structure. Note that the loop below
1654 works because we've already verified that padding
1655 and endianness are compatible. */
1657 emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
1659 for (bitpos = 0;
1660 bitpos < BITS_PER_WORD && bytes > 0;
1661 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
1663 int xbitpos = bitpos + big_endian_correction;
1665 store_bit_field (reg, bitsize, xbitpos, word_mode,
1666 extract_bit_field (word, bitsize, bitpos, 1,
1667 NULL_RTX, word_mode,
1668 word_mode,
1669 bitsize / BITS_PER_UNIT,
1670 BITS_PER_WORD),
1671 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
1675 #endif
1677 /* Now store any partially-in-registers parm.
1678 This is the last place a block-move can happen. */
1679 if (reg_parm_seen)
1680 for (i = 0; i < num_actuals; i++)
1681 if (args[i].partial != 0 && ! args[i].pass_on_stack)
1682 store_one_arg (&args[i], argblock, may_be_alloca,
1683 args_size.var != 0, fndecl, reg_parm_stack_space);
1685 #ifndef PUSH_ARGS_REVERSED
1686 #ifdef STACK_BOUNDARY
1687 /* If we pushed args in forward order, perform stack alignment
1688 after pushing the last arg. */
1689 if (argblock == 0)
1690 anti_adjust_stack (GEN_INT (args_size.constant
1691 - original_args_size.constant));
1692 #endif
1693 #endif
1695 /* If register arguments require space on the stack and stack space
1696 was not preallocated, allocate stack space here for arguments
1697 passed in registers. */
1698 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
1699 if (must_preallocate == 0 && reg_parm_stack_space > 0)
1700 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
1701 #endif
1703 /* Pass the function the address in which to return a structure value. */
1704 if (structure_value_addr && ! structure_value_addr_parm)
1706 emit_move_insn (struct_value_rtx,
1707 force_reg (Pmode,
1708 force_operand (structure_value_addr,
1709 NULL_RTX)));
1710 if (GET_CODE (struct_value_rtx) == REG)
1712 push_to_sequence (use_insns);
1713 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
1714 use_insns = get_insns ();
1715 end_sequence ();
1719 /* Now do the register loads required for any wholly-register parms or any
1720 parms which are passed both on the stack and in a register. Their
1721 expressions were already evaluated.
1723 Mark all register-parms as living through the call, putting these USE
1724 insns in a list headed by USE_INSNS. */
1726 for (i = 0; i < num_actuals; i++)
1728 rtx list = args[i].reg;
1729 int partial = args[i].partial;
1731 while (list)
1733 rtx reg;
1734 int nregs;
1736 /* Process each register that needs to get this arg. */
1737 if (GET_CODE (list) == EXPR_LIST)
1738 reg = XEXP (list, 0), list = XEXP (list, 1);
1739 else
1740 reg = list, list = 0;
1742 /* Set to non-zero if we must move a word at a time, even if just one
1743 word (e.g., partial == 1 && mode == DFmode).  Set to zero if
1744 we just use a normal move insn. */
1745 nregs = (partial ? partial
1746 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1747 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1748 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1749 : 0));
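/* Worked example (added for illustration): a BLKmode argument of 10
bytes on a target with UNITS_PER_WORD == 4 and partial == 0 gives
nregs = (10 + 3) / 4 = 3, so three consecutive words are copied into
registers by move_block_to_reg below.  */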
1751 /* If simple case, just do move. If normal partial, store_one_arg
1752 has already loaded the register for us. In all other cases,
1753 load the register(s) from memory. */
1755 if (nregs == 0)
1756 emit_move_insn (reg, args[i].value);
1758 #ifdef STRICT_ALIGNMENT
1759 /* If we have pre-computed the values to put in the registers in
1760 the case of non-aligned structures, copy them in now. */
1762 else if (args[i].n_aligned_regs != 0)
1763 for (j = 0; j < args[i].n_aligned_regs; j++)
1764 emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
1765 args[i].aligned_regs[j]);
1766 #endif
1768 else if (args[i].partial == 0 || args[i].pass_on_stack)
1769 move_block_to_reg (REGNO (reg),
1770 validize_mem (args[i].value), nregs,
1771 args[i].mode);
1773 push_to_sequence (use_insns);
1774 if (nregs == 0)
1775 emit_insn (gen_rtx (USE, VOIDmode, reg));
1776 else
1777 use_regs (REGNO (reg), nregs);
1778 use_insns = get_insns ();
1779 end_sequence ();
1781 /* PARTIAL referred only to the first register, so clear it for the
1782 next time. */
1783 partial = 0;
1787 /* Perform postincrements before actually calling the function. */
1788 emit_queue ();
1790 /* All arguments and registers used for the call must be set up by now! */
1792 funexp = prepare_call_address (funexp, fndecl, &use_insns);
1794 /* Generate the actual call instruction. */
1795 emit_call_1 (funexp, funtype, args_size.constant, struct_value_size,
1796 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
1797 valreg, old_inhibit_defer_pop, use_insns, is_const);
1799 /* If call is cse'able, make appropriate pair of reg-notes around it.
1800 Test valreg so we don't crash; may safely ignore `const'
1801 if return type is void. */
1802 if (is_const && valreg != 0)
1804 rtx note = 0;
1805 rtx temp = gen_reg_rtx (GET_MODE (valreg));
1806 rtx insns;
1808 /* Construct an "equal form" for the value which mentions all the
1809 arguments in order as well as the function name. */
1810 #ifdef PUSH_ARGS_REVERSED
1811 for (i = 0; i < num_actuals; i++)
1812 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1813 #else
1814 for (i = num_actuals - 1; i >= 0; i--)
1815 note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
1816 #endif
1817 note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);
1819 insns = get_insns ();
1820 end_sequence ();
1822 emit_libcall_block (insns, temp, valreg, note);
1824 valreg = temp;
1827 /* For calls to `setjmp', etc., inform flow.c it should complain
1828 if nonvolatile values are live. */
1830 if (returns_twice)
1832 emit_note (name, NOTE_INSN_SETJMP);
1833 current_function_calls_setjmp = 1;
1836 if (is_longjmp)
1837 current_function_calls_longjmp = 1;
1839 /* Notice functions that cannot return.
1840 If optimizing, insns emitted below will be dead.
1841 If not optimizing, they will exist, which is useful
1842 if the user uses the `return' command in the debugger. */
1844 if (is_volatile || is_longjmp)
1845 emit_barrier ();
1847 /* If value type not void, return an rtx for the value. */
1849 /* If there are cleanups to be called, don't use a hard reg as target. */
1850 if (cleanups_this_call != old_cleanups
1851 && target && REG_P (target)
1852 && REGNO (target) < FIRST_PSEUDO_REGISTER)
1853 target = 0;
1855 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
1856 || ignore)
1858 target = const0_rtx;
1860 else if (structure_value_addr)
1862 if (target == 0 || GET_CODE (target) != MEM)
1864 target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1865 memory_address (TYPE_MODE (TREE_TYPE (exp)),
1866 structure_value_addr));
1867 MEM_IN_STRUCT_P (target)
1868 = (TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
1869 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
1870 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
1871 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE);
1874 else if (pcc_struct_value)
1876 if (target == 0)
1878 /* We used to leave the value in the location that it is
1879 returned in, but that causes problems if it is used more
1880 than once in one expression. Rather than trying to track
1881 when a copy is required, we always copy when TARGET is
1882 not specified. This calling sequence is only used on
1883 a few machines and TARGET is usually nonzero. */
1884 if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
1886 target = assign_stack_temp (BLKmode,
1887 int_size_in_bytes (TREE_TYPE (exp)),
1890 /* Save this temp slot around the pop below. */
1891 preserve_temp_slots (target);
1893 else
1894 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1897 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1898 emit_move_insn (target, gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
1899 copy_to_reg (valreg)));
1900 else
1901 emit_block_move (target, gen_rtx (MEM, BLKmode, copy_to_reg (valreg)),
1902 expr_size (exp),
1903 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
1905 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
1906 && GET_MODE (target) == GET_MODE (valreg))
1907 /* TARGET and VALREG cannot be equal at this point because the latter
1908 would not have REG_FUNCTION_VALUE_P true, while the former would if
1909 it were referring to the same register.
1911 If they refer to the same register, this move will be a no-op, except
1912 when function inlining is being done. */
1913 emit_move_insn (target, valreg);
1914 else
1915 target = copy_to_reg (valreg);
1917 #ifdef PROMOTE_FUNCTION_RETURN
1918 /* If we promoted this return value, make the proper SUBREG. TARGET
1919 might be const0_rtx here, so be careful. */
1920 if (GET_CODE (target) == REG
1921 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
1923 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
1924 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
1926 if (TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE
1927 || TREE_CODE (TREE_TYPE (exp)) == ENUMERAL_TYPE
1928 || TREE_CODE (TREE_TYPE (exp)) == BOOLEAN_TYPE
1929 || TREE_CODE (TREE_TYPE (exp)) == CHAR_TYPE
1930 || TREE_CODE (TREE_TYPE (exp)) == REAL_TYPE
1931 || TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE
1932 || TREE_CODE (TREE_TYPE (exp)) == OFFSET_TYPE)
1934 PROMOTE_MODE (mode, unsignedp, TREE_TYPE (exp));
1937 /* If we didn't promote as expected, something is wrong. */
1938 if (mode != GET_MODE (target))
1939 abort ();
1941 target = gen_rtx (SUBREG, TYPE_MODE (TREE_TYPE (exp)), target, 0);
1942 SUBREG_PROMOTED_VAR_P (target) = 1;
1943 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
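/* For instance (illustrative note, not from the original source): on a
target whose PROMOTE_MODE widens QImode return values to SImode,
TARGET is an SImode register here and the SUBREG built above is
(subreg:QI (reg:SI ...) 0); the two flags record that the value in
the wider register is already sign- or zero-extended, as indicated
by UNSIGNEDP.  */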
1945 #endif
1947 /* Perform all cleanups needed for the arguments of this call
1948 (i.e. destructors in C++). */
1949 expand_cleanups_to (old_cleanups);
1951 /* If size of args is variable or this was a constructor call for a stack
1952 argument, restore saved stack-pointer value. */
1954 if (old_stack_level)
1956 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1957 pending_stack_adjust = old_pending_adj;
1958 #ifdef ACCUMULATE_OUTGOING_ARGS
1959 stack_arg_under_construction = old_stack_arg_under_construction;
1960 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
1961 stack_usage_map = initial_stack_usage_map;
1962 #endif
1964 #ifdef ACCUMULATE_OUTGOING_ARGS
1965 else
1967 #ifdef REG_PARM_STACK_SPACE
1968 if (save_area)
1970 enum machine_mode save_mode = GET_MODE (save_area);
1971 rtx stack_area
1972 = gen_rtx (MEM, save_mode,
1973 memory_address (save_mode,
1974 #ifdef ARGS_GROW_DOWNWARD
1975 plus_constant (argblock, - high_to_save)
1976 #else
1977 plus_constant (argblock, low_to_save)
1978 #endif
1981 if (save_mode != BLKmode)
1982 emit_move_insn (stack_area, save_area);
1983 else
1984 emit_block_move (stack_area, validize_mem (save_area),
1985 GEN_INT (high_to_save - low_to_save + 1),
1986 PARM_BOUNDARY / BITS_PER_UNIT);
1988 #endif
1990 /* If we saved any argument areas, restore them. */
1991 for (i = 0; i < num_actuals; i++)
1992 if (args[i].save_area)
1994 enum machine_mode save_mode = GET_MODE (args[i].save_area);
1995 rtx stack_area
1996 = gen_rtx (MEM, save_mode,
1997 memory_address (save_mode,
1998 XEXP (args[i].stack_slot, 0)));
2000 if (save_mode != BLKmode)
2001 emit_move_insn (stack_area, args[i].save_area);
2002 else
2003 emit_block_move (stack_area, validize_mem (args[i].save_area),
2004 GEN_INT (args[i].size.constant),
2005 PARM_BOUNDARY / BITS_PER_UNIT);
2008 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2009 stack_usage_map = initial_stack_usage_map;
2011 #endif
2013 /* If this was alloca, record the new stack level for nonlocal gotos.
2014 Check for the handler slots since we might not have a save area
2015 for non-local gotos. */
2017 if (may_be_alloca && nonlocal_goto_handler_slot != 0)
2018 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2020 pop_temp_slots ();
2022 return target;
2025 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2026 (emitting the queue unless NO_QUEUE is nonzero),
2027 for a value of mode OUTMODE,
2028 with NARGS different arguments, passed as alternating rtx values
2029 and machine_modes to convert them to.
2030 The rtx values should have been passed through protect_from_queue already.
2032 NO_QUEUE will be true if and only if the library call is a `const' call
2033 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2034 to the variable is_const in expand_call.
2036 NO_QUEUE must be true for const calls, because if it isn't, then
2037 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2038 and will be lost if the libcall sequence is optimized away.
2040 NO_QUEUE must be false for non-const calls, because if it isn't, the
2041 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2042 optimized. For instance, the instruction scheduler may incorrectly
2043 move memory references across the non-const call. */
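/* A minimal usage sketch (illustrative only; `libfunc_symbol', `op0'
and `op1' are placeholders, not identifiers from this file): a
non-const call to a two-operand SImode library routine could be
emitted as

	emit_library_call (libfunc_symbol, 0, SImode, 2,
			   op0, SImode,
			   op1, SImode);

i.e. FUN, NO_QUEUE, OUTMODE and NARGS, followed by NARGS pairs of
(rtx value, machine_mode) as described above.  */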
2045 void
2046 emit_library_call (va_alist)
2047 va_dcl
2049 va_list p;
2050 /* Total size in bytes of all the stack-parms scanned so far. */
2051 struct args_size args_size;
2052 /* Size of arguments before any adjustments (such as rounding). */
2053 struct args_size original_args_size;
2054 register int argnum;
2055 enum machine_mode outmode;
2056 int nargs;
2057 rtx fun;
2058 rtx orgfun;
2059 int inc;
2060 int count;
2061 rtx argblock = 0;
2062 CUMULATIVE_ARGS args_so_far;
2063 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2064 struct args_size offset; struct args_size size; };
2065 struct arg *argvec;
2066 int old_inhibit_defer_pop = inhibit_defer_pop;
2067 int no_queue = 0;
2068 rtx use_insns;
2069 /* library calls are never indirect calls. */
2070 int current_call_is_indirect = 0;
2072 va_start (p);
2073 orgfun = fun = va_arg (p, rtx);
2074 no_queue = va_arg (p, int);
2075 outmode = va_arg (p, enum machine_mode);
2076 nargs = va_arg (p, int);
2078 /* Copy all the libcall-arguments out of the varargs data
2079 and into a vector ARGVEC.
2081 Compute how to pass each argument. We only support a very small subset
2082 of the full argument passing conventions to limit complexity here since
2083 library functions shouldn't have many args. */
2085 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2087 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2089 args_size.constant = 0;
2090 args_size.var = 0;
2092 for (count = 0; count < nargs; count++)
2094 rtx val = va_arg (p, rtx);
2095 enum machine_mode mode = va_arg (p, enum machine_mode);
2097 /* We cannot convert the arg value to the mode the library wants here;
2098 must do it earlier where we know the signedness of the arg. */
2099 if (mode == BLKmode
2100 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2101 abort ();
2103 /* On some machines, there's no way to pass a float to a library fcn.
2104 Pass it as a double instead. */
2105 #ifdef LIBGCC_NEEDS_DOUBLE
2106 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2107 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2108 #endif
2110 /* There's no need to call protect_from_queue, because
2111 either emit_move_insn or emit_push_insn will do that. */
2113 /* Make sure it is a reasonable operand for a move or push insn. */
2114 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2115 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2116 val = force_operand (val, NULL_RTX);
2118 argvec[count].value = val;
2119 argvec[count].mode = mode;
2121 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2122 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2123 abort ();
2124 #endif
2126 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2127 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2128 abort ();
2129 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2130 argvec[count].partial
2131 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2132 #else
2133 argvec[count].partial = 0;
2134 #endif
2136 locate_and_pad_parm (mode, NULL_TREE,
2137 argvec[count].reg && argvec[count].partial == 0,
2138 NULL_TREE, &args_size, &argvec[count].offset,
2139 &argvec[count].size);
2141 if (argvec[count].size.var)
2142 abort ();
2144 #ifndef REG_PARM_STACK_SPACE
2145 if (argvec[count].partial)
2146 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2147 #endif
2149 if (argvec[count].reg == 0 || argvec[count].partial != 0
2150 #ifdef REG_PARM_STACK_SPACE
2151 || 1
2152 #endif
2154 args_size.constant += argvec[count].size.constant;
2156 #ifdef ACCUMULATE_OUTGOING_ARGS
2157 /* If this arg is actually passed on the stack, it might be
2158 clobbering something we already put there (this library call might
2159 be inside the evaluation of an argument to a function whose call
2160 requires the stack). This will only occur when the library call
2161 has sufficient args to run out of argument registers. Abort in
2162 this case; if this ever occurs, code must be added to save and
2163 restore the arg slot. */
2165 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2166 abort ();
2167 #endif
2169 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2171 va_end (p);
2173 /* If this machine requires an external definition for library
2174 functions, write one out. */
2175 assemble_external_libcall (fun);
2177 original_args_size = args_size;
2178 #ifdef STACK_BOUNDARY
2179 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2180 / STACK_BYTES) * STACK_BYTES);
2181 #endif
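/* Worked example (illustration only): with STACK_BOUNDARY == 64,
STACK_BYTES is 8, so an unrounded args_size.constant of 20 becomes
((20 + 7) / 8) * 8 = 24 bytes.  */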
2183 #ifdef REG_PARM_STACK_SPACE
2184 args_size.constant = MAX (args_size.constant,
2185 REG_PARM_STACK_SPACE (NULL_TREE));
2186 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2187 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2188 #endif
2189 #endif
2191 #ifdef ACCUMULATE_OUTGOING_ARGS
2192 if (args_size.constant > current_function_outgoing_args_size)
2193 current_function_outgoing_args_size = args_size.constant;
2194 args_size.constant = 0;
2195 #endif
2197 #ifndef PUSH_ROUNDING
2198 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2199 #endif
2201 #ifdef PUSH_ARGS_REVERSED
2202 #ifdef STACK_BOUNDARY
2203 /* If we push args individually in reverse order, perform stack alignment
2204 before the first push (the last arg). */
2205 if (argblock == 0)
2206 anti_adjust_stack (GEN_INT (args_size.constant
2207 - original_args_size.constant));
2208 #endif
2209 #endif
2211 #ifdef PUSH_ARGS_REVERSED
2212 inc = -1;
2213 argnum = nargs - 1;
2214 #else
2215 inc = 1;
2216 argnum = 0;
2217 #endif
2219 /* Push the args that need to be pushed. */
2221 for (count = 0; count < nargs; count++, argnum += inc)
2223 register enum machine_mode mode = argvec[argnum].mode;
2224 register rtx val = argvec[argnum].value;
2225 rtx reg = argvec[argnum].reg;
2226 int partial = argvec[argnum].partial;
2228 if (! (reg != 0 && partial == 0))
2229 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2230 argblock, GEN_INT (argvec[count].offset.constant));
2231 NO_DEFER_POP;
2234 #ifndef PUSH_ARGS_REVERSED
2235 #ifdef STACK_BOUNDARY
2236 /* If we pushed args in forward order, perform stack alignment
2237 after pushing the last arg. */
2238 if (argblock == 0)
2239 anti_adjust_stack (GEN_INT (args_size.constant
2240 - original_args_size.constant));
2241 #endif
2242 #endif
2244 #ifdef PUSH_ARGS_REVERSED
2245 argnum = nargs - 1;
2246 #else
2247 argnum = 0;
2248 #endif
2250 /* Now load any reg parms into their regs. */
2252 for (count = 0; count < nargs; count++, argnum += inc)
2254 register enum machine_mode mode = argvec[argnum].mode;
2255 register rtx val = argvec[argnum].value;
2256 rtx reg = argvec[argnum].reg;
2257 int partial = argvec[argnum].partial;
2259 if (reg != 0 && partial == 0)
2260 emit_move_insn (reg, val);
2261 NO_DEFER_POP;
2264 /* For version 1.37, try deleting this entirely. */
2265 if (! no_queue)
2266 emit_queue ();
2268 /* Any regs containing parms remain in use through the call. */
2269 start_sequence ();
2270 for (count = 0; count < nargs; count++)
2271 if (argvec[count].reg != 0)
2272 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2274 use_insns = get_insns ();
2275 end_sequence ();
2277 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2279 /* Don't allow popping to be deferred, since then
2280 cse'ing of library calls could delete a call and leave the pop. */
2281 NO_DEFER_POP;
2283 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2284 will set inhibit_defer_pop to that value. */
2286 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2287 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2288 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2289 old_inhibit_defer_pop + 1, use_insns, no_queue);
2291 /* Now restore inhibit_defer_pop to its actual original value. */
2292 OK_DEFER_POP;
2295 /* Like emit_library_call except that an extra argument, VALUE,
2296 comes second and says where to store the result.
2297 (If VALUE is zero, this function chooses a convenient way
2298 to return the value.)
2300 This function returns an rtx for where the value is to be found.
2301 If VALUE is nonzero, VALUE is returned. */
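/* Usage sketch (illustrative; `libfunc_symbol', `target', `result',
`op0' and `op1' are placeholders): a const call returning a DFmode
result in TARGET could be emitted as

	result = emit_library_call_value (libfunc_symbol, target, 1,
					  DFmode, 2,
					  op0, DFmode,
					  op1, DFmode);

i.e. FUN, VALUE, NO_QUEUE, OUTMODE and NARGS, followed by the
(value, mode) pairs; if TARGET were zero, the returned rtx would say
where the value was left instead.  */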
2303 rtx
2304 emit_library_call_value (va_alist)
2305 va_dcl
2307 va_list p;
2308 /* Total size in bytes of all the stack-parms scanned so far. */
2309 struct args_size args_size;
2310 /* Size of arguments before any adjustments (such as rounding). */
2311 struct args_size original_args_size;
2312 register int argnum;
2313 enum machine_mode outmode;
2314 int nargs;
2315 rtx fun;
2316 rtx orgfun;
2317 int inc;
2318 int count;
2319 rtx argblock = 0;
2320 CUMULATIVE_ARGS args_so_far;
2321 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2322 struct args_size offset; struct args_size size; };
2323 struct arg *argvec;
2324 int old_inhibit_defer_pop = inhibit_defer_pop;
2325 int no_queue = 0;
2326 rtx use_insns;
2327 rtx value;
2328 rtx mem_value = 0;
2329 int pcc_struct_value = 0;
2330 /* library calls are never indirect calls. */
2331 int current_call_is_indirect = 0;
2333 va_start (p);
2334 orgfun = fun = va_arg (p, rtx);
2335 value = va_arg (p, rtx);
2336 no_queue = va_arg (p, int);
2337 outmode = va_arg (p, enum machine_mode);
2338 nargs = va_arg (p, int);
2340 /* If this kind of value comes back in memory,
2341 decide where in memory it should come back. */
2342 if (aggregate_value_p (type_for_mode (outmode, 0)))
2344 #ifdef PCC_STATIC_STRUCT_RETURN
2345 rtx pointer_reg
2346 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
2348 mem_value = gen_rtx (MEM, outmode, pointer_reg);
2349 pcc_struct_value = 1;
2350 if (value == 0)
2351 value = gen_reg_rtx (outmode);
2352 #else /* not PCC_STATIC_STRUCT_RETURN */
2353 if (value != 0 && GET_CODE (value) == MEM)
2354 mem_value = value;
2355 else
2356 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2357 #endif
2360 /* ??? Unfinished: must pass the memory address as an argument. */
2362 /* Copy all the libcall-arguments out of the varargs data
2363 and into a vector ARGVEC.
2365 Compute how to pass each argument. We only support a very small subset
2366 of the full argument passing conventions to limit complexity here since
2367 library functions shouldn't have many args. */
2369 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
2371 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2373 args_size.constant = 0;
2374 args_size.var = 0;
2376 count = 0;
2378 /* If there's a structure value address to be passed,
2379 either pass it in the special place, or pass it as an extra argument. */
2380 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
2382 rtx addr = XEXP (mem_value, 0);
2383 nargs++;
2385 /* Make sure it is a reasonable operand for a move or push insn. */
2386 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2387 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2388 addr = force_operand (addr, NULL_RTX);
2390 argvec[count].value = addr;
2391 argvec[count].mode = outmode;
2392 argvec[count].partial = 0;
2394 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
2395 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2396 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2397 abort ();
2398 #endif
2400 locate_and_pad_parm (outmode, NULL_TREE,
2401 argvec[count].reg && argvec[count].partial == 0,
2402 NULL_TREE, &args_size, &argvec[count].offset,
2403 &argvec[count].size);
2406 if (argvec[count].reg == 0 || argvec[count].partial != 0
2407 #ifdef REG_PARM_STACK_SPACE
2408 || 1
2409 #endif
2411 args_size.constant += argvec[count].size.constant;
2413 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2415 count++;
2418 for (; count < nargs; count++)
2420 rtx val = va_arg (p, rtx);
2421 enum machine_mode mode = va_arg (p, enum machine_mode);
2423 /* We cannot convert the arg value to the mode the library wants here;
2424 must do it earlier where we know the signedness of the arg. */
2425 if (mode == BLKmode
2426 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2427 abort ();
2429 /* On some machines, there's no way to pass a float to a library fcn.
2430 Pass it as a double instead. */
2431 #ifdef LIBGCC_NEEDS_DOUBLE
2432 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2433 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2434 #endif
2436 /* There's no need to call protect_from_queue, because
2437 either emit_move_insn or emit_push_insn will do that. */
2439 /* Make sure it is a reasonable operand for a move or push insn. */
2440 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2441 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2442 val = force_operand (val, NULL_RTX);
2444 argvec[count].value = val;
2445 argvec[count].mode = mode;
2447 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2448 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2449 abort ();
2450 #endif
2452 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2453 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2454 abort ();
2455 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2456 argvec[count].partial
2457 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2458 #else
2459 argvec[count].partial = 0;
2460 #endif
2462 locate_and_pad_parm (mode, NULL_TREE,
2463 argvec[count].reg && argvec[count].partial == 0,
2464 NULL_TREE, &args_size, &argvec[count].offset,
2465 &argvec[count].size);
2467 if (argvec[count].size.var)
2468 abort ();
2470 #ifndef REG_PARM_STACK_SPACE
2471 if (argvec[count].partial)
2472 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2473 #endif
2475 if (argvec[count].reg == 0 || argvec[count].partial != 0
2476 #ifdef REG_PARM_STACK_SPACE
2477 || 1
2478 #endif
2480 args_size.constant += argvec[count].size.constant;
2482 #ifdef ACCUMULATE_OUTGOING_ARGS
2483 /* If this arg is actually passed on the stack, it might be
2484 clobbering something we already put there (this library call might
2485 be inside the evaluation of an argument to a function whose call
2486 requires the stack). This will only occur when the library call
2487 has sufficient args to run out of argument registers. Abort in
2488 this case; if this ever occurs, code must be added to save and
2489 restore the arg slot. */
2491 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2492 abort ();
2493 #endif
2495 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2497 va_end (p);
2499 /* If this machine requires an external definition for library
2500 functions, write one out. */
2501 assemble_external_libcall (fun);
2503 original_args_size = args_size;
2504 #ifdef STACK_BOUNDARY
2505 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2506 / STACK_BYTES) * STACK_BYTES);
2507 #endif
2509 #ifdef REG_PARM_STACK_SPACE
2510 args_size.constant = MAX (args_size.constant,
2511 REG_PARM_STACK_SPACE (NULL_TREE));
2512 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2513 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2514 #endif
2515 #endif
2517 #ifdef ACCUMULATE_OUTGOING_ARGS
2518 if (args_size.constant > current_function_outgoing_args_size)
2519 current_function_outgoing_args_size = args_size.constant;
2520 args_size.constant = 0;
2521 #endif
2523 #ifndef PUSH_ROUNDING
2524 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2525 #endif
2527 #ifdef PUSH_ARGS_REVERSED
2528 #ifdef STACK_BOUNDARY
2529 /* If we push args individually in reverse order, perform stack alignment
2530 before the first push (the last arg). */
2531 if (argblock == 0)
2532 anti_adjust_stack (GEN_INT (args_size.constant
2533 - original_args_size.constant));
2534 #endif
2535 #endif
2537 #ifdef PUSH_ARGS_REVERSED
2538 inc = -1;
2539 argnum = nargs - 1;
2540 #else
2541 inc = 1;
2542 argnum = 0;
2543 #endif
2545 /* Push the args that need to be pushed. */
2547 for (count = 0; count < nargs; count++, argnum += inc)
2549 register enum machine_mode mode = argvec[argnum].mode;
2550 register rtx val = argvec[argnum].value;
2551 rtx reg = argvec[argnum].reg;
2552 int partial = argvec[argnum].partial;
2554 if (! (reg != 0 && partial == 0))
2555 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2556 argblock, GEN_INT (argvec[count].offset.constant));
2557 NO_DEFER_POP;
2560 #ifndef PUSH_ARGS_REVERSED
2561 #ifdef STACK_BOUNDARY
2562 /* If we pushed args in forward order, perform stack alignment
2563 after pushing the last arg. */
2564 if (argblock == 0)
2565 anti_adjust_stack (GEN_INT (args_size.constant
2566 - original_args_size.constant));
2567 #endif
2568 #endif
2570 #ifdef PUSH_ARGS_REVERSED
2571 argnum = nargs - 1;
2572 #else
2573 argnum = 0;
2574 #endif
2576 /* Now load any reg parms into their regs. */
2578 for (count = 0; count < nargs; count++, argnum += inc)
2580 register enum machine_mode mode = argvec[argnum].mode;
2581 register rtx val = argvec[argnum].value;
2582 rtx reg = argvec[argnum].reg;
2583 int partial = argvec[argnum].partial;
2585 if (reg != 0 && partial == 0)
2586 emit_move_insn (reg, val);
2587 NO_DEFER_POP;
2590 #if 0
2591 /* For version 1.37, try deleting this entirely. */
2592 if (! no_queue)
2593 emit_queue ();
2594 #endif
2596 /* Any regs containing parms remain in use through the call. */
2597 start_sequence ();
2598 for (count = 0; count < nargs; count++)
2599 if (argvec[count].reg != 0)
2600 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2602 use_insns = get_insns ();
2603 end_sequence ();
2605 /* Pass the function the address in which to return a structure value. */
2606 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
2608 emit_move_insn (struct_value_rtx,
2609 force_reg (Pmode,
2610 force_operand (XEXP (mem_value, 0),
2611 NULL_RTX)));
2612 if (GET_CODE (struct_value_rtx) == REG)
2614 push_to_sequence (use_insns);
2615 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
2616 use_insns = get_insns ();
2617 end_sequence ();
2621 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2623 /* Don't allow popping to be deferred, since then
2624 cse'ing of library calls could delete a call and leave the pop. */
2625 NO_DEFER_POP;
2627 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2628 will set inhibit_defer_pop to that value. */
2630 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2631 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2632 (outmode != VOIDmode && mem_value == 0
2633 ? hard_libcall_value (outmode) : NULL_RTX),
2634 old_inhibit_defer_pop + 1, use_insns, no_queue);
2636 /* Now restore inhibit_defer_pop to its actual original value. */
2637 OK_DEFER_POP;
2639 /* Copy the value to the right place. */
2640 if (outmode != VOIDmode)
2642 if (mem_value)
2644 if (value == 0)
2645 value = mem_value;
2646 if (value != mem_value)
2647 emit_move_insn (value, mem_value);
2649 else if (value != 0)
2650 emit_move_insn (value, hard_libcall_value (outmode));
2651 else
2652 value = hard_libcall_value (outmode);
2655 return value;
2658 #if 0
2659 /* Return an rtx which represents a suitable home on the stack
2660 given TYPE, the type of the argument looking for a home.
2661 This is called only for BLKmode arguments.
2663 SIZE is the size needed for this target.
2664 ARGS_ADDR is the address of the bottom of the argument block for this call.
2665 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
2666 if this machine uses push insns. */
2668 static rtx
2669 target_for_arg (type, size, args_addr, offset)
2670 tree type;
2671 rtx size;
2672 rtx args_addr;
2673 struct args_size offset;
2675 rtx target;
2676 rtx offset_rtx = ARGS_SIZE_RTX (offset);
2678 /* We do not call memory_address if possible,
2679 because we want to address as close to the stack
2680 as possible. For non-variable sized arguments,
2681 this will be stack-pointer relative addressing. */
2682 if (GET_CODE (offset_rtx) == CONST_INT)
2683 target = plus_constant (args_addr, INTVAL (offset_rtx));
2684 else
2686 /* I have no idea how to guarantee that this
2687 will work in the presence of register parameters. */
2688 target = gen_rtx (PLUS, Pmode, args_addr, offset_rtx);
2689 target = memory_address (QImode, target);
2692 return gen_rtx (MEM, BLKmode, target);
2694 #endif
2696 /* Store a single argument for a function call
2697 into the register or memory area where it must be passed.
2698 *ARG describes the argument value and where to pass it.
2700 ARGBLOCK is the address of the stack-block for all the arguments,
2701 or 0 on a machine where arguments are pushed individually.
2703 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
2704 so must be careful about how the stack is used.
2706 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
2707 argument stack.  This is used, when ACCUMULATE_OUTGOING_ARGS is defined,
2708 to indicate that we need not worry about saving and restoring the stack.
2710 FNDECL is the declaration of the function we are calling. */
2712 static void
2713 store_one_arg (arg, argblock, may_be_alloca, variable_size, fndecl,
2714 reg_parm_stack_space)
2715 struct arg_data *arg;
2716 rtx argblock;
2717 int may_be_alloca;
2718 int variable_size;
2719 tree fndecl;
2720 int reg_parm_stack_space;
2722 register tree pval = arg->tree_value;
2723 rtx reg = 0;
2724 int partial = 0;
2725 int used = 0;
2726 int i, lower_bound, upper_bound;
2728 if (TREE_CODE (pval) == ERROR_MARK)
2729 return;
2731 #ifdef ACCUMULATE_OUTGOING_ARGS
2732 /* If this is being stored into a pre-allocated, fixed-size, stack area,
2733 save any previous data at that location. */
2734 if (argblock && ! variable_size && arg->stack)
2736 #ifdef ARGS_GROW_DOWNWARD
2737 /* stack_slot is negative, but we want to index stack_usage_map
2738 with positive values. */
2739 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2740 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
2741 else
2742 abort ();
2744 lower_bound = upper_bound - arg->size.constant;
2745 #else
2746 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
2747 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
2748 else
2749 lower_bound = 0;
2751 upper_bound = lower_bound + arg->size.constant;
2752 #endif
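/* Worked example (added for illustration): with ARGS_GROW_DOWNWARD and
a slot at (plus argblock -16) of 8 bytes, upper_bound is 16 + 1 = 17
and lower_bound is 9, so stack_usage_map indices 9..16 are checked
below; in the upward-growing case a slot at (plus argblock 16) gives
lower_bound 16 and upper_bound 24.  */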
2754 for (i = lower_bound; i < upper_bound; i++)
2755 if (stack_usage_map[i]
2756 #ifdef REG_PARM_STACK_SPACE
2757 /* Don't store things in the fixed argument area at this point;
2758 it has already been saved. */
2759 && i > reg_parm_stack_space
2760 #endif
2762 break;
2764 if (i != upper_bound)
2766 /* We need to make a save area. See what mode we can make it. */
2767 enum machine_mode save_mode
2768 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
2769 rtx stack_area
2770 = gen_rtx (MEM, save_mode,
2771 memory_address (save_mode, XEXP (arg->stack_slot, 0)));
2773 if (save_mode == BLKmode)
2775 arg->save_area = assign_stack_temp (BLKmode,
2776 arg->size.constant, 1);
2777 emit_block_move (validize_mem (arg->save_area), stack_area,
2778 GEN_INT (arg->size.constant),
2779 PARM_BOUNDARY / BITS_PER_UNIT);
2781 else
2783 arg->save_area = gen_reg_rtx (save_mode);
2784 emit_move_insn (arg->save_area, stack_area);
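/* For example (illustrative only): a 4-byte argument on a 32-bit
target gets save_mode == SImode, so the old contents of the slot are
saved with a single register move; a size with no matching integer
mode yields BLKmode and the block-move path above.  */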
2788 #endif
2790 /* If this isn't going to be placed on both the stack and in registers,
2791 set up the register and number of words. */
2792 if (! arg->pass_on_stack)
2793 reg = arg->reg, partial = arg->partial;
2795 if (reg != 0 && partial == 0)
2796 /* Being passed entirely in a register. We shouldn't be called in
2797 this case. */
2798 abort ();
2800 #ifdef STRICT_ALIGNMENT
2801 /* If this arg needs special alignment, don't load the registers
2802 here. */
2803 if (arg->n_aligned_regs != 0)
2804 reg = 0;
2805 #endif
2807 /* If this is being partially passed in a register, but multiple locations
2808 are specified, we assume that the one partially used is the one that is
2809 listed first. */
2810 if (reg && GET_CODE (reg) == EXPR_LIST)
2811 reg = XEXP (reg, 0);
2813 /* If this is being passed partially in a register, we can't evaluate
2814 it directly into its stack slot. Otherwise, we can. */
2815 if (arg->value == 0)
2817 #ifdef ACCUMULATE_OUTGOING_ARGS
2818 /* stack_arg_under_construction is nonzero if a function argument is
2819 being evaluated directly into the outgoing argument list and
2820 expand_call must take special action to preserve the argument list
2821 if it is called recursively.
2823 For scalar function arguments stack_usage_map is sufficient to
2824 determine which stack slots must be saved and restored. Scalar
2825 arguments in general have pass_on_stack == 0.
2827 If this argument is initialized by a function which takes the
2828 address of the argument (a C++ constructor or a C function
2829 returning a BLKmode structure), then stack_usage_map is
2830 insufficient and expand_call must push the stack around the
2831 function call. Such arguments have pass_on_stack == 1.
2833 Note that it is always safe to set stack_arg_under_construction,
2834 but this generates suboptimal code if set when not needed. */
2836 if (arg->pass_on_stack)
2837 stack_arg_under_construction++;
2838 #endif
2839 arg->value = expand_expr (pval,
2840 (partial
2841 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
2842 ? NULL_RTX : arg->stack,
2843 VOIDmode, 0);
2845 /* If the mode doesn't agree (because we are promoting the object,
2846 or for any other reason), convert to the proper mode. */
2848 if (GET_MODE (arg->value) != VOIDmode
2849 && GET_MODE (arg->value) != arg->mode)
2850 arg->value = convert_to_mode (arg->mode, arg->value, arg->unsignedp);
2852 #ifdef ACCUMULATE_OUTGOING_ARGS
2853 if (arg->pass_on_stack)
2854 stack_arg_under_construction--;
2855 #endif
2858 /* Don't allow anything left on stack from computation
2859 of argument to alloca. */
2860 if (may_be_alloca)
2861 do_pending_stack_adjust ();
2863 if (arg->value == arg->stack)
2864 /* If the value is already in the stack slot, we are done. */
2866 else if (arg->mode != BLKmode)
2868 register int size;
2870 /* Argument is a scalar, not entirely passed in registers.
2871 (If part is passed in registers, arg->partial says how much
2872 and emit_push_insn will take care of putting it there.)
2874 Push it, and if its size is less than the
2875 amount of space allocated to it,
2876 also bump stack pointer by the additional space.
2877 Note that in C the default argument promotions
2878 will prevent such mismatches. */
2880 size = GET_MODE_SIZE (arg->mode);
2881 /* Compute how much space the push instruction will push.
2882 On many machines, pushing a byte will advance the stack
2883 pointer by a halfword. */
2884 #ifdef PUSH_ROUNDING
2885 size = PUSH_ROUNDING (size);
2886 #endif
2887 used = size;
2889 /* Compute how much space the argument should get:
2890 round up to a multiple of the alignment for arguments. */
2891 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
2892 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
2893 / (PARM_BOUNDARY / BITS_PER_UNIT))
2894 * (PARM_BOUNDARY / BITS_PER_UNIT));
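/* Worked example (illustration only): pushing an HImode scalar with
PARM_BOUNDARY == 32, assuming FUNCTION_ARG_PADDING says the argument
is padded and PUSH_ROUNDING leaves the 2-byte size unchanged, gives
size == 2 but used == 4, so emit_push_insn below is asked to allocate
used - size == 2 extra bytes of padding.  */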
2896 /* This isn't already where we want it on the stack, so put it there.
2897 This can either be done with push or copy insns. */
2898 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
2899 0, partial, reg, used - size,
2900 argblock, ARGS_SIZE_RTX (arg->offset));
2902 else
2904 /* BLKmode, at least partly to be pushed. */
2906 register int excess;
2907 rtx size_rtx;
2909 /* Pushing a nonscalar.
2910 If part is passed in registers, PARTIAL says how much
2911 and emit_push_insn will take care of putting it there. */
2913 /* Round its size up to a multiple
2914 of the allocation unit for arguments. */
2916 if (arg->size.var != 0)
2918 excess = 0;
2919 size_rtx = ARGS_SIZE_RTX (arg->size);
2921 else
2923 /* PUSH_ROUNDING has no effect on us, because
2924 emit_push_insn for BLKmode is careful to avoid it. */
2925 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
2926 + partial * UNITS_PER_WORD);
2927 size_rtx = expr_size (pval);
2930 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
2931 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
2932 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset));
2936 /* Unless this is a partially-in-register argument, the argument is now
2937 in the stack.
2939 ??? Note that this can change arg->value from arg->stack to
2940 arg->stack_slot and it matters when they are not the same.
2941 It isn't totally clear that this is correct in all cases. */
2942 if (partial == 0)
2943 arg->value = arg->stack_slot;
2945 /* Once we have pushed something, pops can't safely
2946 be deferred during the rest of the arguments. */
2947 NO_DEFER_POP;
2949 /* ANSI doesn't require a sequence point here,
2950 but PCC has one, so this will avoid some problems. */
2951 emit_queue ();
2953 /* Free any temporary slots made in processing this argument. */
2954 free_temp_slots ();
2956 #ifdef ACCUMULATE_OUTGOING_ARGS
2957 /* Now mark the segment we just used. */
2958 if (argblock && ! variable_size && arg->stack)
2959 for (i = lower_bound; i < upper_bound; i++)
2960 stack_usage_map[i] = 1;
2961 #endif