/* gcc/calls.c — from the official-gcc repository,
   blob e19f787d334587617d93d7e6d9059b9994c7718d.  */
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "expr.h"
27 #include "function.h"
28 #include "regs.h"
29 #include "insn-flags.h"
30 #include "toplev.h"
31 #include "output.h"
32 #include "tm_p.h"
/* If the target does not supply its own preferred stack boundary,
   fall back to the mandatory STACK_BOUNDARY.  */
#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
55 /* Data structure and subroutines used within expand_call. */
57 struct arg_data
59 /* Tree node for this argument. */
60 tree tree_value;
61 /* Mode for value; TYPE_MODE unless promoted. */
62 enum machine_mode mode;
63 /* Current RTL value for argument, or 0 if it isn't precomputed. */
64 rtx value;
65 /* Initially-compute RTL value for argument; only for const functions. */
66 rtx initial_value;
67 /* Register to pass this argument in, 0 if passed on stack, or an
68 PARALLEL if the arg is to be copied into multiple non-contiguous
69 registers. */
70 rtx reg;
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
73 int unsignedp;
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
76 int partial;
77 /* Non-zero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
81 int pass_on_stack;
82 /* Offset of this argument from beginning of stack-args. */
83 struct args_size offset;
84 /* Similar, but offset to the start of the stack slot. Different from
85 OFFSET if this arg pads downward. */
86 struct args_size slot_offset;
87 /* Size of this argument on the stack, rounded up for any padding it gets,
88 parts of the argument passed in registers do not count.
89 If REG_PARM_STACK_SPACE is defined, then register parms
90 are counted here as well. */
91 struct args_size size;
92 /* Location on the stack at which parameter should be stored. The store
93 has already been done if STACK == VALUE. */
94 rtx stack;
95 /* Location on the stack of the start of this argument slot. This can
96 differ from STACK if this arg pads downward. This location is known
97 to be aligned to FUNCTION_ARG_BOUNDARY. */
98 rtx stack_slot;
99 #ifdef ACCUMULATE_OUTGOING_ARGS
100 /* Place that this stack area has been saved, if needed. */
101 rtx save_area;
102 #endif
103 /* If an argument's alignment does not permit direct copying into registers,
104 copy in smaller-sized pieces into pseudos. These are stored in a
105 block pointed to by this field. The next field says how many
106 word-sized pseudos we made. */
107 rtx *aligned_regs;
108 int n_aligned_regs;
109 /* The amount that the stack pointer needs to be adjusted to
110 force alignment for the next argument. */
111 struct args_size alignment_pad;
#ifdef ACCUMULATE_OUTGOING_ARGS
/* A vector of one char per byte of stack space.  A byte if non-zero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
#endif
132 static int calls_function PROTO ((tree, int));
133 static int calls_function_1 PROTO ((tree, int));
134 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
135 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
136 rtx, int, rtx, int));
137 static void precompute_register_parameters PROTO ((int, struct arg_data *,
138 int *));
139 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
140 int));
141 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
142 int));
143 static int finalize_must_preallocate PROTO ((int, int,
144 struct arg_data *,
145 struct args_size *));
146 static void precompute_arguments PROTO ((int, int, int,
147 struct arg_data *,
148 struct args_size *));
149 static int compute_argument_block_size PROTO ((int,
150 struct args_size *));
151 static void initialize_argument_information PROTO ((int,
152 struct arg_data *,
153 struct args_size *,
154 int, tree, tree,
155 CUMULATIVE_ARGS *,
156 int, rtx *, int *,
157 int *, int *));
158 static void compute_argument_addresses PROTO ((struct arg_data *,
159 rtx, int));
160 static rtx rtx_for_function_call PROTO ((tree, tree));
161 static void load_register_parameters PROTO ((struct arg_data *,
162 int, rtx *));
164 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
165 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
166 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
167 #endif
169 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
170 `alloca'.
172 If WHICH is 0, return 1 if EXP contains a call to any function.
173 Actually, we only need return 1 if evaluating EXP would require pushing
174 arguments on the stack, but that is too difficult to compute, so we just
175 assume any function call might require the stack. */
177 static tree calls_function_save_exprs;
179 static int
180 calls_function (exp, which)
181 tree exp;
182 int which;
184 int val;
185 calls_function_save_exprs = 0;
186 val = calls_function_1 (exp, which);
187 calls_function_save_exprs = 0;
188 return val;
191 static int
192 calls_function_1 (exp, which)
193 tree exp;
194 int which;
196 register int i;
197 enum tree_code code = TREE_CODE (exp);
198 int type = TREE_CODE_CLASS (code);
199 int length = tree_code_length[(int) code];
201 /* If this code is language-specific, we don't know what it will do. */
202 if ((int) code >= NUM_TREE_CODES)
203 return 1;
205 /* Only expressions and references can contain calls. */
206 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
207 && type != 'b')
208 return 0;
210 switch (code)
212 case CALL_EXPR:
213 if (which == 0)
214 return 1;
215 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
216 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
217 == FUNCTION_DECL))
219 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
221 if ((DECL_BUILT_IN (fndecl)
222 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
223 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
224 || (DECL_SAVED_INSNS (fndecl)
225 && DECL_SAVED_INSNS (fndecl)->calls_alloca))
226 return 1;
229 /* Third operand is RTL. */
230 length = 2;
231 break;
233 case SAVE_EXPR:
234 if (SAVE_EXPR_RTL (exp) != 0)
235 return 0;
236 if (value_member (exp, calls_function_save_exprs))
237 return 0;
238 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
239 calls_function_save_exprs);
240 return (TREE_OPERAND (exp, 0) != 0
241 && calls_function_1 (TREE_OPERAND (exp, 0), which));
243 case BLOCK:
245 register tree local;
247 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
248 if (DECL_INITIAL (local) != 0
249 && calls_function_1 (DECL_INITIAL (local), which))
250 return 1;
253 register tree subblock;
255 for (subblock = BLOCK_SUBBLOCKS (exp);
256 subblock;
257 subblock = TREE_CHAIN (subblock))
258 if (calls_function_1 (subblock, which))
259 return 1;
261 return 0;
263 case METHOD_CALL_EXPR:
264 length = 3;
265 break;
267 case WITH_CLEANUP_EXPR:
268 length = 1;
269 break;
271 case RTL_EXPR:
272 return 0;
274 default:
275 break;
278 for (i = 0; i < length; i++)
279 if (TREE_OPERAND (exp, i) != 0
280 && calls_function_1 (TREE_OPERAND (exp, i), which))
281 return 1;
283 return 0;
286 /* Force FUNEXP into a form suitable for the address of a CALL,
287 and return that as an rtx. Also load the static chain register
288 if FNDECL is a nested function.
290 CALL_FUSAGE points to a variable holding the prospective
291 CALL_INSN_FUNCTION_USAGE information. */
294 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
295 rtx funexp;
296 tree fndecl;
297 rtx *call_fusage;
298 int reg_parm_seen;
300 rtx static_chain_value = 0;
302 funexp = protect_from_queue (funexp, 0);
304 if (fndecl != 0)
305 /* Get possible static chain value for nested function in C. */
306 static_chain_value = lookup_static_chain (fndecl);
308 /* Make a valid memory address and copy constants thru pseudo-regs,
309 but not for a constant address if -fno-function-cse. */
310 if (GET_CODE (funexp) != SYMBOL_REF)
311 /* If we are using registers for parameters, force the
312 function address into a register now. */
313 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
314 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
315 : memory_address (FUNCTION_MODE, funexp));
316 else
318 #ifndef NO_FUNCTION_CSE
319 if (optimize && ! flag_no_function_cse)
320 #ifdef NO_RECURSIVE_FUNCTION_CSE
321 if (fndecl != current_function_decl)
322 #endif
323 funexp = force_reg (Pmode, funexp);
324 #endif
327 if (static_chain_value != 0)
329 emit_move_insn (static_chain_rtx, static_chain_value);
331 if (GET_CODE (static_chain_rtx) == REG)
332 use_reg (call_fusage, static_chain_rtx);
335 return funexp;
338 /* Generate instructions to call function FUNEXP,
339 and optionally pop the results.
340 The CALL_INSN is the first insn generated.
342 FNDECL is the declaration node of the function. This is given to the
343 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
345 FUNTYPE is the data type of the function. This is given to the macro
346 RETURN_POPS_ARGS to determine whether this function pops its own args.
347 We used to allow an identifier for library functions, but that doesn't
348 work when the return type is an aggregate type and the calling convention
349 says that the pointer to this aggregate is to be popped by the callee.
351 STACK_SIZE is the number of bytes of arguments on the stack,
352 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
353 This is both to put into the call insn and
354 to generate explicit popping code if necessary.
356 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
357 It is zero if this call doesn't want a structure value.
359 NEXT_ARG_REG is the rtx that results from executing
360 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
361 just after all the args have had their registers assigned.
362 This could be whatever you like, but normally it is the first
363 arg-register beyond those used for args in this call,
364 or 0 if all the arg-registers are used in this call.
365 It is passed on to `gen_call' so you can put this info in the call insn.
367 VALREG is a hard register in which a value is returned,
368 or 0 if the call does not return a value.
370 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
371 the args to this call were processed.
372 We restore `inhibit_defer_pop' to that value.
374 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
375 denote registers used by the called function.
377 IS_CONST is true if this is a `const' call. */
379 static void
380 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
381 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
382 call_fusage, is_const)
383 rtx funexp;
384 tree fndecl ATTRIBUTE_UNUSED;
385 tree funtype ATTRIBUTE_UNUSED;
386 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
387 HOST_WIDE_INT rounded_stack_size;
388 HOST_WIDE_INT struct_value_size;
389 rtx next_arg_reg;
390 rtx valreg;
391 int old_inhibit_defer_pop;
392 rtx call_fusage;
393 int is_const;
395 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
396 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
397 rtx call_insn;
398 #ifndef ACCUMULATE_OUTGOING_ARGS
399 int already_popped = 0;
400 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
401 #endif
403 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
404 and we don't want to load it into a register as an optimization,
405 because prepare_call_address already did it if it should be done. */
406 if (GET_CODE (funexp) != SYMBOL_REF)
407 funexp = memory_address (FUNCTION_MODE, funexp);
409 #ifndef ACCUMULATE_OUTGOING_ARGS
410 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
411 /* If the target has "call" or "call_value" insns, then prefer them
412 if no arguments are actually popped. If the target does not have
413 "call" or "call_value" insns, then we must use the popping versions
414 even if the call has no arguments to pop. */
415 #if defined (HAVE_call) && defined (HAVE_call_value)
416 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
417 && n_popped > 0)
418 #else
419 if (HAVE_call_pop && HAVE_call_value_pop)
420 #endif
422 rtx n_pop = GEN_INT (n_popped);
423 rtx pat;
425 /* If this subroutine pops its own args, record that in the call insn
426 if possible, for the sake of frame pointer elimination. */
428 if (valreg)
429 pat = gen_call_value_pop (valreg,
430 gen_rtx_MEM (FUNCTION_MODE, funexp),
431 rounded_stack_size_rtx, next_arg_reg, n_pop);
432 else
433 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
434 rounded_stack_size_rtx, next_arg_reg, n_pop);
436 emit_call_insn (pat);
437 already_popped = 1;
439 else
440 #endif
441 #endif
443 #if defined (HAVE_call) && defined (HAVE_call_value)
444 if (HAVE_call && HAVE_call_value)
446 if (valreg)
447 emit_call_insn (gen_call_value (valreg,
448 gen_rtx_MEM (FUNCTION_MODE, funexp),
449 rounded_stack_size_rtx, next_arg_reg,
450 NULL_RTX));
451 else
452 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
453 rounded_stack_size_rtx, next_arg_reg,
454 struct_value_size_rtx));
456 else
457 #endif
458 abort ();
460 /* Find the CALL insn we just emitted. */
461 for (call_insn = get_last_insn ();
462 call_insn && GET_CODE (call_insn) != CALL_INSN;
463 call_insn = PREV_INSN (call_insn))
466 if (! call_insn)
467 abort ();
469 /* Put the register usage information on the CALL. If there is already
470 some usage information, put ours at the end. */
471 if (CALL_INSN_FUNCTION_USAGE (call_insn))
473 rtx link;
475 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
476 link = XEXP (link, 1))
479 XEXP (link, 1) = call_fusage;
481 else
482 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
484 /* If this is a const call, then set the insn's unchanging bit. */
485 if (is_const)
486 CONST_CALL_P (call_insn) = 1;
488 /* Restore this now, so that we do defer pops for this call's args
489 if the context of the call as a whole permits. */
490 inhibit_defer_pop = old_inhibit_defer_pop;
492 #ifndef ACCUMULATE_OUTGOING_ARGS
493 /* If returning from the subroutine does not automatically pop the args,
494 we need an instruction to pop them sooner or later.
495 Perhaps do it now; perhaps just record how much space to pop later.
497 If returning from the subroutine does pop the args, indicate that the
498 stack pointer will be changed. */
500 if (n_popped > 0)
502 if (!already_popped)
503 CALL_INSN_FUNCTION_USAGE (call_insn)
504 = gen_rtx_EXPR_LIST (VOIDmode,
505 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
506 CALL_INSN_FUNCTION_USAGE (call_insn));
507 rounded_stack_size -= n_popped;
508 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
511 if (rounded_stack_size != 0)
513 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
514 pending_stack_adjust += rounded_stack_size;
515 else
516 adjust_stack (rounded_stack_size_rtx);
518 #endif
521 /* Determine if the function identified by NAME and FNDECL is one with
522 special properties we wish to know about.
524 For example, if the function might return more than one time (setjmp), then
525 set RETURNS_TWICE to a nonzero value.
527 Similarly set IS_LONGJMP for if the function is in the longjmp family.
529 Set IS_MALLOC for any of the standard memory allocation functions which
530 allocate from the heap.
532 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
533 space from the stack such as alloca. */
535 void
536 special_function_p (fndecl, returns_twice, is_longjmp,
537 is_malloc, may_be_alloca)
538 tree fndecl;
539 int *returns_twice;
540 int *is_longjmp;
541 int *is_malloc;
542 int *may_be_alloca;
544 *returns_twice = 0;
545 *is_longjmp = 0;
546 *may_be_alloca = 0;
548 /* The function decl may have the `malloc' attribute. */
549 *is_malloc = fndecl && DECL_IS_MALLOC (fndecl);
551 if (! *is_malloc
552 && fndecl && DECL_NAME (fndecl)
553 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
554 /* Exclude functions not at the file scope, or not `extern',
555 since they are not the magic functions we would otherwise
556 think they are. */
557 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
559 char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
560 char *tname = name;
562 /* We assume that alloca will always be called by name. It
563 makes no sense to pass it as a pointer-to-function to
564 anything that does not understand its behavior. */
565 *may_be_alloca
566 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
567 && name[0] == 'a'
568 && ! strcmp (name, "alloca"))
569 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
570 && name[0] == '_'
571 && ! strcmp (name, "__builtin_alloca"))));
573 /* Disregard prefix _, __ or __x. */
574 if (name[0] == '_')
576 if (name[1] == '_' && name[2] == 'x')
577 tname += 3;
578 else if (name[1] == '_')
579 tname += 2;
580 else
581 tname += 1;
584 if (tname[0] == 's')
586 *returns_twice
587 = ((tname[1] == 'e'
588 && (! strcmp (tname, "setjmp")
589 || ! strcmp (tname, "setjmp_syscall")))
590 || (tname[1] == 'i'
591 && ! strcmp (tname, "sigsetjmp"))
592 || (tname[1] == 'a'
593 && ! strcmp (tname, "savectx")));
594 if (tname[1] == 'i'
595 && ! strcmp (tname, "siglongjmp"))
596 *is_longjmp = 1;
598 else if ((tname[0] == 'q' && tname[1] == 's'
599 && ! strcmp (tname, "qsetjmp"))
600 || (tname[0] == 'v' && tname[1] == 'f'
601 && ! strcmp (tname, "vfork")))
602 *returns_twice = 1;
604 else if (tname[0] == 'l' && tname[1] == 'o'
605 && ! strcmp (tname, "longjmp"))
606 *is_longjmp = 1;
607 /* Do not add any more malloc-like functions to this list,
608 instead mark them as malloc functions using the malloc attribute.
609 Note, realloc is not suitable for attribute malloc since
610 it may return the same address across multiple calls. */
611 else if (! strcmp (tname, "malloc")
612 || ! strcmp (tname, "calloc")
613 || ! strcmp (tname, "strdup")
614 /* Note use of NAME rather than TNAME here. These functions
615 are only reserved when preceded with __. */
616 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
617 || ! strcmp (name, "__nw") /* mangled __builtin_new */
618 || ! strcmp (name, "__builtin_new")
619 || ! strcmp (name, "__builtin_vec_new"))
620 *is_malloc = 1;
624 /* Precompute all register parameters as described by ARGS, storing values
625 into fields within the ARGS array.
627 NUM_ACTUALS indicates the total number elements in the ARGS array.
629 Set REG_PARM_SEEN if we encounter a register parameter. */
631 static void
632 precompute_register_parameters (num_actuals, args, reg_parm_seen)
633 int num_actuals;
634 struct arg_data *args;
635 int *reg_parm_seen;
637 int i;
639 *reg_parm_seen = 0;
641 for (i = 0; i < num_actuals; i++)
642 if (args[i].reg != 0 && ! args[i].pass_on_stack)
644 *reg_parm_seen = 1;
646 if (args[i].value == 0)
648 push_temp_slots ();
649 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
650 VOIDmode, 0);
651 preserve_temp_slots (args[i].value);
652 pop_temp_slots ();
654 /* ANSI doesn't require a sequence point here,
655 but PCC has one, so this will avoid some problems. */
656 emit_queue ();
659 /* If we are to promote the function arg to a wider mode,
660 do it now. */
662 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
663 args[i].value
664 = convert_modes (args[i].mode,
665 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
666 args[i].value, args[i].unsignedp);
668 /* If the value is expensive, and we are inside an appropriately
669 short loop, put the value into a pseudo and then put the pseudo
670 into the hard reg.
672 For small register classes, also do this if this call uses
673 register parameters. This is to avoid reload conflicts while
674 loading the parameters registers. */
676 if ((! (GET_CODE (args[i].value) == REG
677 || (GET_CODE (args[i].value) == SUBREG
678 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
679 && args[i].mode != BLKmode
680 && rtx_cost (args[i].value, SET) > 2
681 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
682 || preserve_subexpressions_p ()))
683 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
687 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
689 /* The argument list is the property of the called routine and it
690 may clobber it. If the fixed area has been used for previous
691 parameters, we must save and restore it. */
692 static rtx
693 save_fixed_argument_area (reg_parm_stack_space, argblock,
694 low_to_save, high_to_save)
695 int reg_parm_stack_space;
696 rtx argblock;
697 int *low_to_save;
698 int *high_to_save;
700 int i;
701 rtx save_area = NULL_RTX;
703 /* Compute the boundary of the that needs to be saved, if any. */
704 #ifdef ARGS_GROW_DOWNWARD
705 for (i = 0; i < reg_parm_stack_space + 1; i++)
706 #else
707 for (i = 0; i < reg_parm_stack_space; i++)
708 #endif
710 if (i >= highest_outgoing_arg_in_use
711 || stack_usage_map[i] == 0)
712 continue;
714 if (*low_to_save == -1)
715 *low_to_save = i;
717 *high_to_save = i;
720 if (*low_to_save >= 0)
722 int num_to_save = *high_to_save - *low_to_save + 1;
723 enum machine_mode save_mode
724 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
725 rtx stack_area;
727 /* If we don't have the required alignment, must do this in BLKmode. */
728 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
729 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
730 save_mode = BLKmode;
732 #ifdef ARGS_GROW_DOWNWARD
733 stack_area = gen_rtx_MEM (save_mode,
734 memory_address (save_mode,
735 plus_constant (argblock,
736 - *high_to_save)));
737 #else
738 stack_area = gen_rtx_MEM (save_mode,
739 memory_address (save_mode,
740 plus_constant (argblock,
741 *low_to_save)));
742 #endif
743 if (save_mode == BLKmode)
745 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
746 /* Cannot use emit_block_move here because it can be done by a library
747 call which in turn gets into this place again and deadly infinite
748 recursion happens. */
749 move_by_pieces (validize_mem (save_area), stack_area, num_to_save,
750 PARM_BOUNDARY / BITS_PER_UNIT);
752 else
754 save_area = gen_reg_rtx (save_mode);
755 emit_move_insn (save_area, stack_area);
758 return save_area;
761 static void
762 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
763 rtx save_area;
764 rtx argblock;
765 int high_to_save;
766 int low_to_save;
768 enum machine_mode save_mode = GET_MODE (save_area);
769 #ifdef ARGS_GROW_DOWNWARD
770 rtx stack_area
771 = gen_rtx_MEM (save_mode,
772 memory_address (save_mode,
773 plus_constant (argblock,
774 - high_to_save)));
775 #else
776 rtx stack_area
777 = gen_rtx_MEM (save_mode,
778 memory_address (save_mode,
779 plus_constant (argblock,
780 low_to_save)));
781 #endif
783 if (save_mode != BLKmode)
784 emit_move_insn (stack_area, save_area);
785 else
786 /* Cannot use emit_block_move here because it can be done by a library
787 call which in turn gets into this place again and deadly infinite
788 recursion happens. */
789 move_by_pieces (stack_area, validize_mem (save_area),
790 high_to_save - low_to_save + 1,
791 PARM_BOUNDARY / BITS_PER_UNIT);
793 #endif
795 /* If any elements in ARGS refer to parameters that are to be passed in
796 registers, but not in memory, and whose alignment does not permit a
797 direct copy into registers. Copy the values into a group of pseudos
798 which we will later copy into the appropriate hard registers.
800 Pseudos for each unaligned argument will be stored into the array
801 args[argnum].aligned_regs. The caller is responsible for deallocating
802 the aligned_regs array if it is nonzero. */
804 static void
805 store_unaligned_arguments_into_pseudos (args, num_actuals)
806 struct arg_data *args;
807 int num_actuals;
809 int i, j;
811 for (i = 0; i < num_actuals; i++)
812 if (args[i].reg != 0 && ! args[i].pass_on_stack
813 && args[i].mode == BLKmode
814 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
815 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
817 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
818 int big_endian_correction = 0;
820 args[i].n_aligned_regs
821 = args[i].partial ? args[i].partial
822 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
824 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
825 * args[i].n_aligned_regs);
827 /* Structures smaller than a word are aligned to the least
828 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
829 this means we must skip the empty high order bytes when
830 calculating the bit offset. */
831 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
832 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
834 for (j = 0; j < args[i].n_aligned_regs; j++)
836 rtx reg = gen_reg_rtx (word_mode);
837 rtx word = operand_subword_force (args[i].value, j, BLKmode);
838 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
839 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
841 args[i].aligned_regs[j] = reg;
843 /* There is no need to restrict this code to loading items
844 in TYPE_ALIGN sized hunks. The bitfield instructions can
845 load up entire word sized registers efficiently.
847 ??? This may not be needed anymore.
848 We use to emit a clobber here but that doesn't let later
849 passes optimize the instructions we emit. By storing 0 into
850 the register later passes know the first AND to zero out the
851 bitfield being set in the register is unnecessary. The store
852 of 0 will be deleted as will at least the first AND. */
854 emit_move_insn (reg, const0_rtx);
856 bytes -= bitsize / BITS_PER_UNIT;
857 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
858 extract_bit_field (word, bitsize, 0, 1,
859 NULL_RTX, word_mode,
860 word_mode,
861 bitalign / BITS_PER_UNIT,
862 BITS_PER_WORD),
863 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
868 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
869 ACTPARMS.
871 NUM_ACTUALS is the total number of parameters.
873 N_NAMED_ARGS is the total number of named arguments.
875 FNDECL is the tree code for the target of this call (if known)
877 ARGS_SO_FAR holds state needed by the target to know where to place
878 the next argument.
880 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
881 for arguments which are passed in registers.
883 OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
884 and may be modified by this routine.
886 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
887 flags which may may be modified by this routine. */
889 static void
890 initialize_argument_information (num_actuals, args, args_size, n_named_args,
891 actparms, fndecl, args_so_far,
892 reg_parm_stack_space, old_stack_level,
893 old_pending_adj, must_preallocate, is_const)
894 int num_actuals ATTRIBUTE_UNUSED;
895 struct arg_data *args;
896 struct args_size *args_size;
897 int n_named_args ATTRIBUTE_UNUSED;
898 tree actparms;
899 tree fndecl;
900 CUMULATIVE_ARGS *args_so_far;
901 int reg_parm_stack_space;
902 rtx *old_stack_level;
903 int *old_pending_adj;
904 int *must_preallocate;
905 int *is_const;
907 /* 1 if scanning parms front to back, -1 if scanning back to front. */
908 int inc;
910 /* Count arg position in order args appear. */
911 int argpos;
913 struct args_size alignment_pad;
914 int i;
915 tree p;
917 args_size->constant = 0;
918 args_size->var = 0;
920 /* In this loop, we consider args in the order they are written.
921 We fill up ARGS from the front or from the back if necessary
922 so that in any case the first arg to be pushed ends up at the front. */
924 #ifdef PUSH_ARGS_REVERSED
925 i = num_actuals - 1, inc = -1;
926 /* In this case, must reverse order of args
927 so that we compute and push the last arg first. */
928 #else
929 i = 0, inc = 1;
930 #endif
932 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
933 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
935 tree type = TREE_TYPE (TREE_VALUE (p));
936 int unsignedp;
937 enum machine_mode mode;
939 args[i].tree_value = TREE_VALUE (p);
941 /* Replace erroneous argument with constant zero. */
942 if (type == error_mark_node || TYPE_SIZE (type) == 0)
943 args[i].tree_value = integer_zero_node, type = integer_type_node;
945 /* If TYPE is a transparent union, pass things the way we would
946 pass the first field of the union. We have already verified that
947 the modes are the same. */
948 if (TYPE_TRANSPARENT_UNION (type))
949 type = TREE_TYPE (TYPE_FIELDS (type));
951 /* Decide where to pass this arg.
953 args[i].reg is nonzero if all or part is passed in registers.
955 args[i].partial is nonzero if part but not all is passed in registers,
956 and the exact value says how many words are passed in registers.
958 args[i].pass_on_stack is nonzero if the argument must at least be
959 computed on the stack. It may then be loaded back into registers
960 if args[i].reg is nonzero.
962 These decisions are driven by the FUNCTION_... macros and must agree
963 with those made by function.c. */
965 /* See if this argument should be passed by invisible reference. */
966 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
967 && contains_placeholder_p (TYPE_SIZE (type)))
968 || TREE_ADDRESSABLE (type)
969 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
970 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
971 type, argpos < n_named_args)
972 #endif
975 /* If we're compiling a thunk, pass through invisible
976 references instead of making a copy. */
977 if (current_function_is_thunk
978 #ifdef FUNCTION_ARG_CALLEE_COPIES
979 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
980 type, argpos < n_named_args)
981 /* If it's in a register, we must make a copy of it too. */
982 /* ??? Is this a sufficient test? Is there a better one? */
983 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
984 && REG_P (DECL_RTL (args[i].tree_value)))
985 && ! TREE_ADDRESSABLE (type))
986 #endif
989 /* C++ uses a TARGET_EXPR to indicate that we want to make a
990 new object from the argument. If we are passing by
991 invisible reference, the callee will do that for us, so we
992 can strip off the TARGET_EXPR. This is not always safe,
993 but it is safe in the only case where this is a useful
994 optimization; namely, when the argument is a plain object.
995 In that case, the frontend is just asking the backend to
996 make a bitwise copy of the argument. */
998 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
999 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
1000 (args[i].tree_value, 1)))
1001 == 'd')
1002 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1003 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1005 args[i].tree_value = build1 (ADDR_EXPR,
1006 build_pointer_type (type),
1007 args[i].tree_value);
1008 type = build_pointer_type (type);
1010 else
1012 /* We make a copy of the object and pass the address to the
1013 function being called. */
1014 rtx copy;
1016 if (TYPE_SIZE (type) == 0
1017 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1018 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1019 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
1020 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
1021 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
1023 /* This is a variable-sized object. Make space on the stack
1024 for it. */
1025 rtx size_rtx = expr_size (TREE_VALUE (p));
1027 if (*old_stack_level == 0)
1029 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1030 *old_pending_adj = pending_stack_adjust;
1031 pending_stack_adjust = 0;
1034 copy = gen_rtx_MEM (BLKmode,
1035 allocate_dynamic_stack_space (size_rtx,
1036 NULL_RTX,
1037 TYPE_ALIGN (type)));
1039 else
1041 int size = int_size_in_bytes (type);
1042 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1045 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1047 store_expr (args[i].tree_value, copy, 0);
1048 *is_const = 0;
1050 args[i].tree_value = build1 (ADDR_EXPR,
1051 build_pointer_type (type),
1052 make_tree (type, copy));
1053 type = build_pointer_type (type);
1057 mode = TYPE_MODE (type);
1058 unsignedp = TREE_UNSIGNED (type);
1060 #ifdef PROMOTE_FUNCTION_ARGS
1061 mode = promote_mode (type, mode, &unsignedp, 1);
1062 #endif
1064 args[i].unsignedp = unsignedp;
1065 args[i].mode = mode;
1066 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1067 argpos < n_named_args);
1068 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1069 if (args[i].reg)
1070 args[i].partial
1071 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1072 argpos < n_named_args);
1073 #endif
1075 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1077 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1078 it means that we are to pass this arg in the register(s) designated
1079 by the PARALLEL, but also to pass it in the stack. */
1080 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1081 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1082 args[i].pass_on_stack = 1;
1084 /* If this is an addressable type, we must preallocate the stack
1085 since we must evaluate the object into its final location.
1087 If this is to be passed in both registers and the stack, it is simpler
1088 to preallocate. */
1089 if (TREE_ADDRESSABLE (type)
1090 || (args[i].pass_on_stack && args[i].reg != 0))
1091 *must_preallocate = 1;
1093 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1094 we cannot consider this function call constant. */
1095 if (TREE_ADDRESSABLE (type))
1096 *is_const = 0;
1098 /* Compute the stack-size of this argument. */
1099 if (args[i].reg == 0 || args[i].partial != 0
1100 || reg_parm_stack_space > 0
1101 || args[i].pass_on_stack)
1102 locate_and_pad_parm (mode, type,
1103 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1105 #else
1106 args[i].reg != 0,
1107 #endif
1108 fndecl, args_size, &args[i].offset,
1109 &args[i].size, &alignment_pad);
1111 #ifndef ARGS_GROW_DOWNWARD
1112 args[i].slot_offset = *args_size;
1113 #endif
1115 args[i].alignment_pad = alignment_pad;
1117 /* If a part of the arg was put into registers,
1118 don't include that part in the amount pushed. */
1119 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1120 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1121 / (PARM_BOUNDARY / BITS_PER_UNIT)
1122 * (PARM_BOUNDARY / BITS_PER_UNIT));
1124 /* Update ARGS_SIZE, the total stack space for args so far. */
1126 args_size->constant += args[i].size.constant;
1127 if (args[i].size.var)
1129 ADD_PARM_SIZE (*args_size, args[i].size.var);
1132 /* Since the slot offset points to the bottom of the slot,
1133 we must record it after incrementing if the args grow down. */
1134 #ifdef ARGS_GROW_DOWNWARD
1135 args[i].slot_offset = *args_size;
1137 args[i].slot_offset.constant = -args_size->constant;
1138 if (args_size->var)
1140 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1142 #endif
1144 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1145 have been used, etc. */
1147 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1148 argpos < n_named_args);
1152 /* Update ARGS_SIZE to contain the total size for the argument block.
1153 Return the original constant component of the argument block's size.
1155 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1156 for arguments passed in registers. */
1158 static int
1159 compute_argument_block_size (reg_parm_stack_space, args_size)
1160 int reg_parm_stack_space;
1161 struct args_size *args_size;
1163 int unadjusted_args_size = args_size->constant;
1165 /* Compute the actual size of the argument block required. The variable
1166 and constant sizes must be combined, the size may have to be rounded,
1167 and there may be a minimum required size. */
1169 if (args_size->var)
1171 args_size->var = ARGS_SIZE_TREE (*args_size);
1172 args_size->constant = 0;
1174 #ifdef PREFERRED_STACK_BOUNDARY
1175 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1176 args_size->var = round_up (args_size->var, STACK_BYTES);
1177 #endif
1179 if (reg_parm_stack_space > 0)
1181 args_size->var
1182 = size_binop (MAX_EXPR, args_size->var,
1183 size_int (reg_parm_stack_space));
1185 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1186 /* The area corresponding to register parameters is not to count in
1187 the size of the block we need. So make the adjustment. */
1188 args_size->var
1189 = size_binop (MINUS_EXPR, args_size->var,
1190 size_int (reg_parm_stack_space));
1191 #endif
1194 else
1196 #ifdef PREFERRED_STACK_BOUNDARY
1197 args_size->constant = (((args_size->constant
1198 + pending_stack_adjust
1199 + STACK_BYTES - 1)
1200 / STACK_BYTES * STACK_BYTES)
1201 - pending_stack_adjust);
1202 #endif
1204 args_size->constant = MAX (args_size->constant,
1205 reg_parm_stack_space);
1207 #ifdef MAYBE_REG_PARM_STACK_SPACE
1208 if (reg_parm_stack_space == 0)
1209 args_size->constant = 0;
1210 #endif
1212 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1213 args_size->constant -= reg_parm_stack_space;
1214 #endif
1216 return unadjusted_args_size;
1219 /* Precompute parameters as needed for a function call.
1221 IS_CONST indicates the target function is a pure function.
1223 MUST_PREALLOCATE indicates that we must preallocate stack space for
1224 any stack arguments.
1226 NUM_ACTUALS is the number of arguments.
1228 ARGS is an array containing information for each argument; this routine
1229 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1231 ARGS_SIZE contains information about the size of the arg list. */
1233 static void
1234 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1235 int is_const;
1236 int must_preallocate;
1237 int num_actuals;
1238 struct arg_data *args;
1239 struct args_size *args_size;
1241 int i;
1243 /* If this function call is cse'able, precompute all the parameters.
1244 Note that if the parameter is constructed into a temporary, this will
1245 cause an additional copy because the parameter will be constructed
1246 into a temporary location and then copied into the outgoing arguments.
1247 If a parameter contains a call to alloca and this function uses the
1248 stack, precompute the parameter. */
1250 /* If we preallocated the stack space, and some arguments must be passed
1251 on the stack, then we must precompute any parameter which contains a
1252 function call which will store arguments on the stack.
1253 Otherwise, evaluating the parameter may clobber previous parameters
1254 which have already been stored into the stack. */
1256 for (i = 0; i < num_actuals; i++)
1257 if (is_const
1258 || ((args_size->var != 0 || args_size->constant != 0)
1259 && calls_function (args[i].tree_value, 1))
1260 || (must_preallocate
1261 && (args_size->var != 0 || args_size->constant != 0)
1262 && calls_function (args[i].tree_value, 0)))
1264 /* If this is an addressable type, we cannot pre-evaluate it. */
1265 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1266 abort ();
1268 push_temp_slots ();
1270 args[i].value
1271 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1273 preserve_temp_slots (args[i].value);
1274 pop_temp_slots ();
1276 /* ANSI doesn't require a sequence point here,
1277 but PCC has one, so this will avoid some problems. */
1278 emit_queue ();
1280 args[i].initial_value = args[i].value
1281 = protect_from_queue (args[i].value, 0);
1283 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1285 args[i].value
1286 = convert_modes (args[i].mode,
1287 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1288 args[i].value, args[i].unsignedp);
1289 #ifdef PROMOTE_FOR_CALL_ONLY
1290 /* CSE will replace this only if it contains args[i].value
1291 pseudo, so convert it down to the declared mode using
1292 a SUBREG. */
1293 if (GET_CODE (args[i].value) == REG
1294 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1296 args[i].initial_value
1297 = gen_rtx_SUBREG (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1298 args[i].value, 0);
1299 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1300 SUBREG_PROMOTED_UNSIGNED_P (args[i].initial_value)
1301 = args[i].unsignedp;
1303 #endif
1308 /* Given the current state of MUST_PREALLOCATE and information about
1309 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1310 compute and return the final value for MUST_PREALLOCATE. */
1312 static int
1313 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1314 int must_preallocate;
1315 int num_actuals;
1316 struct arg_data *args;
1317 struct args_size *args_size;
1319 /* See if we have or want to preallocate stack space.
1321 If we would have to push a partially-in-regs parm
1322 before other stack parms, preallocate stack space instead.
1324 If the size of some parm is not a multiple of the required stack
1325 alignment, we must preallocate.
1327 If the total size of arguments that would otherwise create a copy in
1328 a temporary (such as a CALL) is more than half the total argument list
1329 size, preallocation is faster.
1331 Another reason to preallocate is if we have a machine (like the m88k)
1332 where stack alignment is required to be maintained between every
1333 pair of insns, not just when the call is made. However, we assume here
1334 that such machines either do not have push insns (and hence preallocation
1335 would occur anyway) or the problem is taken care of with
1336 PUSH_ROUNDING. */
1338 if (! must_preallocate)
1340 int partial_seen = 0;
1341 int copy_to_evaluate_size = 0;
1342 int i;
1344 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1346 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1347 partial_seen = 1;
1348 else if (partial_seen && args[i].reg == 0)
1349 must_preallocate = 1;
1351 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1352 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1353 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1354 || TREE_CODE (args[i].tree_value) == COND_EXPR
1355 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1356 copy_to_evaluate_size
1357 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1360 if (copy_to_evaluate_size * 2 >= args_size->constant
1361 && args_size->constant > 0)
1362 must_preallocate = 1;
1364 return must_preallocate;
1367 /* If we preallocated stack space, compute the address of each argument
1368 and store it into the ARGS array.
1370 We need not ensure it is a valid memory address here; it will be
1371 validized when it is used.
1373 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1375 static void
1376 compute_argument_addresses (args, argblock, num_actuals)
1377 struct arg_data *args;
1378 rtx argblock;
1379 int num_actuals;
1381 if (argblock)
1383 rtx arg_reg = argblock;
1384 int i, arg_offset = 0;
1386 if (GET_CODE (argblock) == PLUS)
1387 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1389 for (i = 0; i < num_actuals; i++)
1391 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1392 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1393 rtx addr;
1395 /* Skip this parm if it will not be passed on the stack. */
1396 if (! args[i].pass_on_stack && args[i].reg != 0)
1397 continue;
1399 if (GET_CODE (offset) == CONST_INT)
1400 addr = plus_constant (arg_reg, INTVAL (offset));
1401 else
1402 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1404 addr = plus_constant (addr, arg_offset);
1405 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1406 MEM_SET_IN_STRUCT_P
1407 (args[i].stack,
1408 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1410 if (GET_CODE (slot_offset) == CONST_INT)
1411 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1412 else
1413 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1415 addr = plus_constant (addr, arg_offset);
1416 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1421 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1422 in a call instruction.
1424 FNDECL is the tree node for the target function. For an indirect call
1425 FNDECL will be NULL_TREE.
1427 EXP is the CALL_EXPR for this call. */
1429 static rtx
1430 rtx_for_function_call (fndecl, exp)
1431 tree fndecl;
1432 tree exp;
1434 rtx funexp;
1436 /* Get the function to call, in the form of RTL. */
1437 if (fndecl)
1439 /* If this is the first use of the function, see if we need to
1440 make an external definition for it. */
1441 if (! TREE_USED (fndecl))
1443 assemble_external (fndecl);
1444 TREE_USED (fndecl) = 1;
1447 /* Get a SYMBOL_REF rtx for the function address. */
1448 funexp = XEXP (DECL_RTL (fndecl), 0);
1450 else
1451 /* Generate an rtx (probably a pseudo-register) for the address. */
1453 rtx funaddr;
1454 push_temp_slots ();
1455 funaddr = funexp =
1456 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1457 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1459 /* Check the function is executable. */
1460 if (current_function_check_memory_usage)
1462 #ifdef POINTERS_EXTEND_UNSIGNED
1463 /* It might be OK to convert funexp in place, but there's
1464 a lot going on between here and when it happens naturally
1465 that this seems safer. */
1466 funaddr = convert_memory_address (Pmode, funexp);
1467 #endif
1468 emit_library_call (chkr_check_exec_libfunc, 1,
1469 VOIDmode, 1,
1470 funaddr, Pmode);
1472 emit_queue ();
1474 return funexp;
1477 /* Do the register loads required for any wholly-register parms or any
1478 parms which are passed both on the stack and in a register. Their
1479 expressions were already evaluated.
1481 Mark all register-parms as living through the call, putting these USE
1482 insns in the CALL_INSN_FUNCTION_USAGE field. */
1484 static void
1485 load_register_parameters (args, num_actuals, call_fusage)
1486 struct arg_data *args;
1487 int num_actuals;
1488 rtx *call_fusage;
1490 int i, j;
1492 #ifdef LOAD_ARGS_REVERSED
1493 for (i = num_actuals - 1; i >= 0; i--)
1494 #else
1495 for (i = 0; i < num_actuals; i++)
1496 #endif
1498 rtx reg = args[i].reg;
1499 int partial = args[i].partial;
1500 int nregs;
1502 if (reg)
1504 /* Set to non-negative if must move a word at a time, even if just
1505 one word (e.g, partial == 1 && mode == DFmode). Set to -1 if
1506 we just use a normal move insn. This value can be zero if the
1507 argument is a zero size structure with no fields. */
1508 nregs = (partial ? partial
1509 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1510 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1511 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1512 : -1));
1514 /* Handle calls that pass values in multiple non-contiguous
1515 locations. The Irix 6 ABI has examples of this. */
1517 if (GET_CODE (reg) == PARALLEL)
1519 emit_group_load (reg, args[i].value,
1520 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1521 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1522 / BITS_PER_UNIT));
1525 /* If simple case, just do move. If normal partial, store_one_arg
1526 has already loaded the register for us. In all other cases,
1527 load the register(s) from memory. */
1529 else if (nregs == -1)
1530 emit_move_insn (reg, args[i].value);
1532 /* If we have pre-computed the values to put in the registers in
1533 the case of non-aligned structures, copy them in now. */
1535 else if (args[i].n_aligned_regs != 0)
1536 for (j = 0; j < args[i].n_aligned_regs; j++)
1537 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1538 args[i].aligned_regs[j]);
1540 else if (partial == 0 || args[i].pass_on_stack)
1541 move_block_to_reg (REGNO (reg),
1542 validize_mem (args[i].value), nregs,
1543 args[i].mode);
1545 /* Handle calls that pass values in multiple non-contiguous
1546 locations. The Irix 6 ABI has examples of this. */
1547 if (GET_CODE (reg) == PARALLEL)
1548 use_group_regs (call_fusage, reg);
1549 else if (nregs == -1)
1550 use_reg (call_fusage, reg);
1551 else
1552 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1557 /* Generate all the code for a function call
1558 and return an rtx for its value.
1559 Store the value in TARGET (specified as an rtx) if convenient.
1560 If the value is stored in TARGET then TARGET is returned.
1561 If IGNORE is nonzero, then we ignore the value of the function call. */
1564 expand_call (exp, target, ignore)
1565 tree exp;
1566 rtx target;
1567 int ignore;
1569 /* List of actual parameters. */
1570 tree actparms = TREE_OPERAND (exp, 1);
1571 /* RTX for the function to be called. */
1572 rtx funexp;
1573 /* Data type of the function. */
1574 tree funtype;
1575 /* Declaration of the function being called,
1576 or 0 if the function is computed (not known by name). */
1577 tree fndecl = 0;
1578 char *name = 0;
1580 /* Register in which non-BLKmode value will be returned,
1581 or 0 if no value or if value is BLKmode. */
1582 rtx valreg;
1583 /* Address where we should return a BLKmode value;
1584 0 if value not BLKmode. */
1585 rtx structure_value_addr = 0;
1586 /* Nonzero if that address is being passed by treating it as
1587 an extra, implicit first parameter. Otherwise,
1588 it is passed by being copied directly into struct_value_rtx. */
1589 int structure_value_addr_parm = 0;
1590 /* Size of aggregate value wanted, or zero if none wanted
1591 or if we are using the non-reentrant PCC calling convention
1592 or expecting the value in registers. */
1593 HOST_WIDE_INT struct_value_size = 0;
1594 /* Nonzero if called function returns an aggregate in memory PCC style,
1595 by returning the address of where to find it. */
1596 int pcc_struct_value = 0;
1598 /* Number of actual parameters in this call, including struct value addr. */
1599 int num_actuals;
1600 /* Number of named args. Args after this are anonymous ones
1601 and they must all go on the stack. */
1602 int n_named_args;
1604 /* Vector of information about each argument.
1605 Arguments are numbered in the order they will be pushed,
1606 not the order they are written. */
1607 struct arg_data *args;
1609 /* Total size in bytes of all the stack-parms scanned so far. */
1610 struct args_size args_size;
1611 /* Size of arguments before any adjustments (such as rounding). */
1612 int unadjusted_args_size;
1613 /* Data on reg parms scanned so far. */
1614 CUMULATIVE_ARGS args_so_far;
1615 /* Nonzero if a reg parm has been scanned. */
1616 int reg_parm_seen;
1617 /* Nonzero if this is an indirect function call. */
1619 /* Nonzero if we must avoid push-insns in the args for this call.
1620 If stack space is allocated for register parameters, but not by the
1621 caller, then it is preallocated in the fixed part of the stack frame.
1622 So the entire argument block must then be preallocated (i.e., we
1623 ignore PUSH_ROUNDING in that case). */
1625 #ifdef PUSH_ROUNDING
1626 int must_preallocate = 0;
1627 #else
1628 int must_preallocate = 1;
1629 #endif
1631 /* Size of the stack reserved for parameter registers. */
1632 int reg_parm_stack_space = 0;
1634 /* Address of space preallocated for stack parms
1635 (on machines that lack push insns), or 0 if space not preallocated. */
1636 rtx argblock = 0;
1638 /* Nonzero if it is plausible that this is a call to alloca. */
1639 int may_be_alloca;
1640 /* Nonzero if this is a call to malloc or a related function. */
1641 int is_malloc;
1642 /* Nonzero if this is a call to setjmp or a related function. */
1643 int returns_twice;
1644 /* Nonzero if this is a call to `longjmp'. */
1645 int is_longjmp;
1646 /* Nonzero if this is a call to an inline function. */
1647 int is_integrable = 0;
1648 /* Nonzero if this is a call to a `const' function.
1649 Note that only explicitly named functions are handled as `const' here. */
1650 int is_const = 0;
1651 /* Nonzero if this is a call to a `volatile' function. */
1652 int is_volatile = 0;
1653 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1654 /* Define the boundary of the register parm stack space that needs to be
1655 save, if any. */
1656 int low_to_save = -1, high_to_save;
1657 rtx save_area = 0; /* Place that it is saved */
1658 #endif
1660 #ifdef ACCUMULATE_OUTGOING_ARGS
1661 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1662 char *initial_stack_usage_map = stack_usage_map;
1663 int old_stack_arg_under_construction;
1664 #endif
1666 rtx old_stack_level = 0;
1667 int old_pending_adj = 0;
1668 int old_inhibit_defer_pop = inhibit_defer_pop;
1669 rtx call_fusage = 0;
1670 register tree p;
1671 register int i;
1673 /* The value of the function call can be put in a hard register. But
1674 if -fcheck-memory-usage, code which invokes functions (and thus
1675 damages some hard registers) can be inserted before using the value.
1676 So, target is always a pseudo-register in that case. */
1677 if (current_function_check_memory_usage)
1678 target = 0;
1680 /* See if we can find a DECL-node for the actual function.
1681 As a result, decide whether this is a call to an integrable function. */
1683 p = TREE_OPERAND (exp, 0);
1684 if (TREE_CODE (p) == ADDR_EXPR)
1686 fndecl = TREE_OPERAND (p, 0);
1687 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1688 fndecl = 0;
1689 else
1691 if (!flag_no_inline
1692 && fndecl != current_function_decl
1693 && DECL_INLINE (fndecl)
1694 && DECL_SAVED_INSNS (fndecl)
1695 && DECL_SAVED_INSNS (fndecl)->inlinable)
1696 is_integrable = 1;
1697 else if (! TREE_ADDRESSABLE (fndecl))
1699 /* In case this function later becomes inlinable,
1700 record that there was already a non-inline call to it.
1702 Use abstraction instead of setting TREE_ADDRESSABLE
1703 directly. */
1704 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1705 && optimize > 0)
1707 warning_with_decl (fndecl, "can't inline call to `%s'");
1708 warning ("called from here");
1710 mark_addressable (fndecl);
1713 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1714 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1715 is_const = 1;
1717 if (TREE_THIS_VOLATILE (fndecl))
1718 is_volatile = 1;
1722 /* If we don't have specific function to call, see if we have a
1723 constant or `noreturn' function from the type. */
1724 if (fndecl == 0)
1726 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1727 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1730 #ifdef REG_PARM_STACK_SPACE
1731 #ifdef MAYBE_REG_PARM_STACK_SPACE
1732 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1733 #else
1734 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1735 #endif
1736 #endif
1738 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1739 if (reg_parm_stack_space > 0)
1740 must_preallocate = 1;
1741 #endif
1743 /* Warn if this value is an aggregate type,
1744 regardless of which calling convention we are using for it. */
1745 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1746 warning ("function call has aggregate value");
1748 /* Set up a place to return a structure. */
1750 /* Cater to broken compilers. */
1751 if (aggregate_value_p (exp))
1753 /* This call returns a big structure. */
1754 is_const = 0;
1756 #ifdef PCC_STATIC_STRUCT_RETURN
1758 pcc_struct_value = 1;
1759 /* Easier than making that case work right. */
1760 if (is_integrable)
1762 /* In case this is a static function, note that it has been
1763 used. */
1764 if (! TREE_ADDRESSABLE (fndecl))
1765 mark_addressable (fndecl);
1766 is_integrable = 0;
1769 #else /* not PCC_STATIC_STRUCT_RETURN */
1771 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1773 if (target && GET_CODE (target) == MEM)
1774 structure_value_addr = XEXP (target, 0);
1775 else
1777 /* Assign a temporary to hold the value. */
1778 tree d;
1780 /* For variable-sized objects, we must be called with a target
1781 specified. If we were to allocate space on the stack here,
1782 we would have no way of knowing when to free it. */
1784 if (struct_value_size < 0)
1785 abort ();
1787 /* This DECL is just something to feed to mark_addressable;
1788 it doesn't get pushed. */
1789 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1790 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1791 mark_addressable (d);
1792 mark_temp_addr_taken (DECL_RTL (d));
1793 structure_value_addr = XEXP (DECL_RTL (d), 0);
1794 TREE_USED (d) = 1;
1795 target = 0;
1798 #endif /* not PCC_STATIC_STRUCT_RETURN */
1801 /* If called function is inline, try to integrate it. */
1803 if (is_integrable)
1805 rtx temp;
1806 #ifdef ACCUMULATE_OUTGOING_ARGS
1807 rtx before_call = get_last_insn ();
1808 #endif
1810 temp = expand_inline_function (fndecl, actparms, target,
1811 ignore, TREE_TYPE (exp),
1812 structure_value_addr);
1814 /* If inlining succeeded, return. */
1815 if (temp != (rtx) (HOST_WIDE_INT) -1)
1817 #ifdef ACCUMULATE_OUTGOING_ARGS
1818 /* If the outgoing argument list must be preserved, push
1819 the stack before executing the inlined function if it
1820 makes any calls. */
1822 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1823 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1824 break;
1826 if (stack_arg_under_construction || i >= 0)
1828 rtx first_insn
1829 = before_call ? NEXT_INSN (before_call) : get_insns ();
1830 rtx insn = NULL_RTX, seq;
1832 /* Look for a call in the inline function code.
1833 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1834 nonzero then there is a call and it is not necessary
1835 to scan the insns. */
1837 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1838 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1839 if (GET_CODE (insn) == CALL_INSN)
1840 break;
1842 if (insn)
1844 /* Reserve enough stack space so that the largest
1845 argument list of any function call in the inline
1846 function does not overlap the argument list being
1847 evaluated. This is usually an overestimate because
1848 allocate_dynamic_stack_space reserves space for an
1849 outgoing argument list in addition to the requested
1850 space, but there is no way to ask for stack space such
1851 that an argument list of a certain length can be
1852 safely constructed.
1854 Add the stack space reserved for register arguments, if
1855 any, in the inline function. What is really needed is the
1856 largest value of reg_parm_stack_space in the inline
1857 function, but that is not available. Using the current
1858 value of reg_parm_stack_space is wrong, but gives
1859 correct results on all supported machines. */
1861 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1862 + reg_parm_stack_space);
1864 start_sequence ();
1865 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1866 allocate_dynamic_stack_space (GEN_INT (adjust),
1867 NULL_RTX, BITS_PER_UNIT);
1868 seq = get_insns ();
1869 end_sequence ();
1870 emit_insns_before (seq, first_insn);
1871 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1874 #endif
1876 /* If the result is equivalent to TARGET, return TARGET to simplify
1877 checks in store_expr. They can be equivalent but not equal in the
1878 case of a function that returns BLKmode. */
1879 if (temp != target && rtx_equal_p (temp, target))
1880 return target;
1881 return temp;
1884 /* If inlining failed, mark FNDECL as needing to be compiled
1885 separately after all. If function was declared inline,
1886 give a warning. */
1887 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1888 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1890 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1891 warning ("called from here");
1893 mark_addressable (fndecl);
1896 function_call_count++;
1898 if (fndecl && DECL_NAME (fndecl))
1899 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1901 /* See if this is a call to a function that can return more than once
1902 or a call to longjmp or malloc. */
1903 special_function_p (fndecl, &returns_twice, &is_longjmp,
1904 &is_malloc, &may_be_alloca);
1906 if (may_be_alloca)
1907 current_function_calls_alloca = 1;
1909 /* Operand 0 is a pointer-to-function; get the type of the function. */
1910 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1911 if (! POINTER_TYPE_P (funtype))
1912 abort ();
1913 funtype = TREE_TYPE (funtype);
1915 /* When calling a const function, we must pop the stack args right away,
1916 so that the pop is deleted or moved with the call. */
1917 if (is_const)
1918 NO_DEFER_POP;
1920 /* Don't let pending stack adjusts add up to too much.
1921 Also, do all pending adjustments now
1922 if there is any chance this might be a call to alloca. */
1924 if (pending_stack_adjust >= 32
1925 || (pending_stack_adjust > 0 && may_be_alloca))
1926 do_pending_stack_adjust ();
1928 /* Push the temporary stack slot level so that we can free any temporaries
1929 we make. */
1930 push_temp_slots ();
1932 /* Start updating where the next arg would go.
1934 On some machines (such as the PA) indirect calls have a different
1935 calling convention than normal calls. The last argument in
1936 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1937 or not. */
1938 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1940 /* If struct_value_rtx is 0, it means pass the address
1941 as if it were an extra parameter. */
1942 if (structure_value_addr && struct_value_rtx == 0)
1944 /* If structure_value_addr is a REG other than
1945 virtual_outgoing_args_rtx, we can always use it. If it
1946 is not a REG, we must always copy it into a register.
1947 If it is virtual_outgoing_args_rtx, we must copy it to another
1948 register in some cases. */
1949 rtx temp = (GET_CODE (structure_value_addr) != REG
1950 #ifdef ACCUMULATE_OUTGOING_ARGS
1951 || (stack_arg_under_construction
1952 && structure_value_addr == virtual_outgoing_args_rtx)
1953 #endif
1954 ? copy_addr_to_reg (structure_value_addr)
1955 : structure_value_addr);
1957 actparms
1958 = tree_cons (error_mark_node,
1959 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1960 temp),
1961 actparms);
1962 structure_value_addr_parm = 1;
1965 /* Count the arguments and set NUM_ACTUALS. */
1966 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1967 num_actuals = i;
1969 /* Compute number of named args.
1970 Normally, don't include the last named arg if anonymous args follow.
1971 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1972 (If no anonymous args follow, the result of list_length is actually
1973 one too large. This is harmless.)
1975 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1976 zero, this machine will be able to place unnamed args that were passed in
1977 registers into the stack. So treat all args as named. This allows the
1978 insns emitted for a specific argument list to be independent of the
1979 function declaration.
1981 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1982 way to pass unnamed args in registers, so we must force them into
1983 memory. */
1985 if ((STRICT_ARGUMENT_NAMING
1986 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1987 && TYPE_ARG_TYPES (funtype) != 0)
1988 n_named_args
1989 = (list_length (TYPE_ARG_TYPES (funtype))
1990 /* Don't include the last named arg. */
1991 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1992 /* Count the struct value address, if it is passed as a parm. */
1993 + structure_value_addr_parm);
1994 else
1995 /* If we know nothing, treat all args as named. */
1996 n_named_args = num_actuals;
1998 /* Make a vector to hold all the information about each arg. */
1999 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2000 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
2002 /* Build up entries in the ARGS array, compute the size of the arguments
2003 into ARGS_SIZE, etc. */
2004 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
2005 actparms, fndecl, &args_so_far,
2006 reg_parm_stack_space, &old_stack_level,
2007 &old_pending_adj, &must_preallocate,
2008 &is_const);
2010 #ifdef FINAL_REG_PARM_STACK_SPACE
2011 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2012 args_size.var);
2013 #endif
2015 if (args_size.var)
2017 /* If this function requires a variable-sized argument list, don't try to
2018 make a cse'able block for this call. We may be able to do this
2019 eventually, but it is too complicated to keep track of what insns go
2020 in the cse'able block and which don't. */
2022 is_const = 0;
2023 must_preallocate = 1;
2026 /* Compute the actual size of the argument block required. The variable
2027 and constant sizes must be combined, the size may have to be rounded,
2028 and there may be a minimum required size. */
2029 unadjusted_args_size
2030 = compute_argument_block_size (reg_parm_stack_space, &args_size);
2032 /* Now make final decision about preallocating stack space. */
2033 must_preallocate = finalize_must_preallocate (must_preallocate,
2034 num_actuals, args, &args_size);
2036 /* If the structure value address will reference the stack pointer, we must
2037 stabilize it. We don't need to do this if we know that we are not going
2038 to adjust the stack pointer in processing this call. */
2040 if (structure_value_addr
2041 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2042 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
2043 && (args_size.var
2044 #ifndef ACCUMULATE_OUTGOING_ARGS
2045 || args_size.constant
2046 #endif
2048 structure_value_addr = copy_to_reg (structure_value_addr);
2050 /* Precompute any arguments as needed. */
2051 precompute_arguments (is_const, must_preallocate, num_actuals,
2052 args, &args_size);
2054 /* Now we are about to start emitting insns that can be deleted
2055 if a libcall is deleted. */
2056 if (is_const || is_malloc)
2057 start_sequence ();
2059 /* If we have no actual push instructions, or shouldn't use them,
2060 make space for all args right now. */
2062 if (args_size.var != 0)
2064 if (old_stack_level == 0)
2066 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2067 old_pending_adj = pending_stack_adjust;
2068 pending_stack_adjust = 0;
2069 #ifdef ACCUMULATE_OUTGOING_ARGS
2070 /* stack_arg_under_construction says whether a stack arg is
2071 being constructed at the old stack level. Pushing the stack
2072 gets a clean outgoing argument block. */
2073 old_stack_arg_under_construction = stack_arg_under_construction;
2074 stack_arg_under_construction = 0;
2075 #endif
2077 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2079 else
2081 /* Note that we must go through the motions of allocating an argument
2082 block even if the size is zero because we may be storing args
2083 in the area reserved for register arguments, which may be part of
2084 the stack frame. */
2086 int needed = args_size.constant;
2088 /* Store the maximum argument space used. It will be pushed by
2089 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2090 checking). */
2092 if (needed > current_function_outgoing_args_size)
2093 current_function_outgoing_args_size = needed;
2095 if (must_preallocate)
2097 #ifdef ACCUMULATE_OUTGOING_ARGS
2098 /* Since the stack pointer will never be pushed, it is possible for
2099 the evaluation of a parm to clobber something we have already
2100 written to the stack. Since most function calls on RISC machines
2101 do not use the stack, this is uncommon, but must work correctly.
2103 Therefore, we save any area of the stack that was already written
2104 and that we are using. Here we set up to do this by making a new
2105 stack usage map from the old one. The actual save will be done
2106 by store_one_arg.
2108 Another approach might be to try to reorder the argument
2109 evaluations to avoid this conflicting stack usage. */
2111 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2112 /* Since we will be writing into the entire argument area, the
2113 map must be allocated for its entire size, not just the part that
2114 is the responsibility of the caller. */
2115 needed += reg_parm_stack_space;
2116 #endif
2118 #ifdef ARGS_GROW_DOWNWARD
2119 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2120 needed + 1);
2121 #else
2122 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2123 needed);
2124 #endif
2125 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2127 if (initial_highest_arg_in_use)
2128 bcopy (initial_stack_usage_map, stack_usage_map,
2129 initial_highest_arg_in_use);
2131 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2132 bzero (&stack_usage_map[initial_highest_arg_in_use],
2133 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2134 needed = 0;
2136 /* The address of the outgoing argument list must not be copied to a
2137 register here, because argblock would be left pointing to the
2138 wrong place after the call to allocate_dynamic_stack_space below.
2141 argblock = virtual_outgoing_args_rtx;
2143 #else /* not ACCUMULATE_OUTGOING_ARGS */
2144 if (inhibit_defer_pop == 0)
2146 /* Try to reuse some or all of the pending_stack_adjust
2147 to get this space. Maybe we can avoid any pushing. */
2148 if (needed > pending_stack_adjust)
2150 needed -= pending_stack_adjust;
2151 pending_stack_adjust = 0;
2153 else
2155 pending_stack_adjust -= needed;
2156 needed = 0;
2159 /* Special case this because overhead of `push_block' in this
2160 case is non-trivial. */
2161 if (needed == 0)
2162 argblock = virtual_outgoing_args_rtx;
2163 else
2164 argblock = push_block (GEN_INT (needed), 0, 0);
2166 /* We only really need to call `copy_to_reg' in the case where push
2167 insns are going to be used to pass ARGBLOCK to a function
2168 call in ARGS. In that case, the stack pointer changes value
2169 from the allocation point to the call point, and hence
2170 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2171 But might as well always do it. */
2172 argblock = copy_to_reg (argblock);
2173 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2177 #ifdef ACCUMULATE_OUTGOING_ARGS
2178 /* The save/restore code in store_one_arg handles all cases except one:
2179 a constructor call (including a C function returning a BLKmode struct)
2180 to initialize an argument. */
2181 if (stack_arg_under_construction)
2183 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2184 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2185 #else
2186 rtx push_size = GEN_INT (args_size.constant);
2187 #endif
2188 if (old_stack_level == 0)
2190 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2191 old_pending_adj = pending_stack_adjust;
2192 pending_stack_adjust = 0;
2193 /* stack_arg_under_construction says whether a stack arg is
2194 being constructed at the old stack level. Pushing the stack
2195 gets a clean outgoing argument block. */
2196 old_stack_arg_under_construction = stack_arg_under_construction;
2197 stack_arg_under_construction = 0;
2198 /* Make a new map for the new argument list. */
2199 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2200 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2201 highest_outgoing_arg_in_use = 0;
2203 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2205 /* If argument evaluation might modify the stack pointer, copy the
2206 address of the argument list to a register. */
2207 for (i = 0; i < num_actuals; i++)
2208 if (args[i].pass_on_stack)
2210 argblock = copy_addr_to_reg (argblock);
2211 break;
2213 #endif
2215 compute_argument_addresses (args, argblock, num_actuals);
2217 #ifdef PUSH_ARGS_REVERSED
2218 #ifdef PREFERRED_STACK_BOUNDARY
2219 /* If we push args individually in reverse order, perform stack alignment
2220 before the first push (the last arg). */
2221 if (argblock == 0)
2222 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2223 #endif
2224 #endif
2226 /* Don't try to defer pops if preallocating, not even from the first arg,
2227 since ARGBLOCK probably refers to the SP. */
2228 if (argblock)
2229 NO_DEFER_POP;
2231 funexp = rtx_for_function_call (fndecl, exp);
2233 /* Figure out the register where the value, if any, will come back. */
2234 valreg = 0;
2235 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2236 && ! structure_value_addr)
2238 if (pcc_struct_value)
2239 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2240 fndecl, 0);
2241 else
2242 valreg = hard_function_value (TREE_TYPE (exp), fndecl, 0);
2245 /* Precompute all register parameters. It isn't safe to compute anything
2246 once we have started filling any specific hard regs. */
2247 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2249 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2251 /* Save the fixed argument area if it's part of the caller's frame and
2252 is clobbered by argument setup for this call. */
2253 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2254 &low_to_save, &high_to_save);
2255 #endif
2258 /* Now store (and compute if necessary) all non-register parms.
2259 These come before register parms, since they can require block-moves,
2260 which could clobber the registers used for register parms.
2261 Parms which have partial registers are not stored here,
2262 but we do preallocate space here if they want that. */
2264 for (i = 0; i < num_actuals; i++)
2265 if (args[i].reg == 0 || args[i].pass_on_stack)
2266 store_one_arg (&args[i], argblock, may_be_alloca,
2267 args_size.var != 0, reg_parm_stack_space);
2269 /* If we have a parm that is passed in registers but not in memory
2270 and whose alignment does not permit a direct copy into registers,
2271 make a group of pseudos that correspond to each register that we
2272 will later fill. */
2273 if (STRICT_ALIGNMENT)
2274 store_unaligned_arguments_into_pseudos (args, num_actuals);
2276 /* Now store any partially-in-registers parm.
2277 This is the last place a block-move can happen. */
2278 if (reg_parm_seen)
2279 for (i = 0; i < num_actuals; i++)
2280 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2281 store_one_arg (&args[i], argblock, may_be_alloca,
2282 args_size.var != 0, reg_parm_stack_space);
2284 #ifndef PUSH_ARGS_REVERSED
2285 #ifdef PREFERRED_STACK_BOUNDARY
2286 /* If we pushed args in forward order, perform stack alignment
2287 after pushing the last arg. */
2288 if (argblock == 0)
2289 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2290 #endif
2291 #endif
2293 /* If register arguments require space on the stack and stack space
2294 was not preallocated, allocate stack space here for arguments
2295 passed in registers. */
2296 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2297 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2298 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2299 #endif
2301 /* Pass the function the address in which to return a structure value. */
2302 if (structure_value_addr && ! structure_value_addr_parm)
2304 emit_move_insn (struct_value_rtx,
2305 force_reg (Pmode,
2306 force_operand (structure_value_addr,
2307 NULL_RTX)));
2309 /* Mark the memory for the aggregate as write-only. */
2310 if (current_function_check_memory_usage)
2311 emit_library_call (chkr_set_right_libfunc, 1,
2312 VOIDmode, 3,
2313 structure_value_addr, Pmode,
2314 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2315 GEN_INT (MEMORY_USE_WO),
2316 TYPE_MODE (integer_type_node));
2318 if (GET_CODE (struct_value_rtx) == REG)
2319 use_reg (&call_fusage, struct_value_rtx);
2322 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2324 load_register_parameters (args, num_actuals, &call_fusage);
2326 /* Perform postincrements before actually calling the function. */
2327 emit_queue ();
2329 /* All arguments and registers used for the call must be set up by now! */
2331 /* Generate the actual call instruction. */
2332 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2333 args_size.constant, struct_value_size,
2334 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2335 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2337 /* If call is cse'able, make appropriate pair of reg-notes around it.
2338 Test valreg so we don't crash; may safely ignore `const'
2339 if return type is void. Disable for PARALLEL return values, because
2340 we have no way to move such values into a pseudo register. */
2341 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2343 rtx note = 0;
2344 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2345 rtx insns;
2347 /* Mark the return value as a pointer if needed. */
2348 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2350 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2351 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2354 /* Construct an "equal form" for the value which mentions all the
2355 arguments in order as well as the function name. */
2356 #ifdef PUSH_ARGS_REVERSED
2357 for (i = 0; i < num_actuals; i++)
2358 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2359 #else
2360 for (i = num_actuals - 1; i >= 0; i--)
2361 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2362 #endif
2363 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2365 insns = get_insns ();
2366 end_sequence ();
2368 emit_libcall_block (insns, temp, valreg, note);
2370 valreg = temp;
2372 else if (is_const)
2374 /* Otherwise, just write out the sequence without a note. */
2375 rtx insns = get_insns ();
2377 end_sequence ();
2378 emit_insns (insns);
2380 else if (is_malloc)
2382 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2383 rtx last, insns;
2385 /* The return value from a malloc-like function is a pointer. */
2386 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2387 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2389 emit_move_insn (temp, valreg);
2391 /* The return value from a malloc-like function can not alias
2392 anything else. */
2393 last = get_last_insn ();
2394 REG_NOTES (last) =
2395 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2397 /* Write out the sequence. */
2398 insns = get_insns ();
2399 end_sequence ();
2400 emit_insns (insns);
2401 valreg = temp;
2404 /* For calls to `setjmp', etc., inform flow.c it should complain
2405 if nonvolatile values are live. */
2407 if (returns_twice)
2409 emit_note (name, NOTE_INSN_SETJMP);
2410 current_function_calls_setjmp = 1;
2413 if (is_longjmp)
2414 current_function_calls_longjmp = 1;
2416 /* Notice functions that cannot return.
2417 If optimizing, insns emitted below will be dead.
2418 If not optimizing, they will exist, which is useful
2419 if the user uses the `return' command in the debugger. */
2421 if (is_volatile || is_longjmp)
2422 emit_barrier ();
2424 /* If value type not void, return an rtx for the value. */
2426 /* If there are cleanups to be called, don't use a hard reg as target.
2427 We need to double check this and see if it matters anymore. */
2428 if (any_pending_cleanups (1)
2429 && target && REG_P (target)
2430 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2431 target = 0;
2433 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2434 || ignore)
2436 target = const0_rtx;
2438 else if (structure_value_addr)
2440 if (target == 0 || GET_CODE (target) != MEM)
2442 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2443 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2444 structure_value_addr));
2445 MEM_SET_IN_STRUCT_P (target,
2446 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2449 else if (pcc_struct_value)
2451 /* This is the special C++ case where we need to
2452 know what the true target was. We take care to
2453 never use this value more than once in one expression. */
2454 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2455 copy_to_reg (valreg));
2456 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2458 /* Handle calls that return values in multiple non-contiguous locations.
2459 The Irix 6 ABI has examples of this. */
2460 else if (GET_CODE (valreg) == PARALLEL)
2462 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2464 if (target == 0)
2466 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2467 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2468 preserve_temp_slots (target);
2471 if (! rtx_equal_p (target, valreg))
2472 emit_group_store (target, valreg, bytes,
2473 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2475 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2476 && GET_MODE (target) == GET_MODE (valreg))
2477 /* TARGET and VALREG cannot be equal at this point because the latter
2478 would not have REG_FUNCTION_VALUE_P true, while the former would if
2479 it were referring to the same register.
2481 If they refer to the same register, this move will be a no-op, except
2482 when function inlining is being done. */
2483 emit_move_insn (target, valreg);
2484 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2485 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2486 else
2487 target = copy_to_reg (valreg);
2489 #ifdef PROMOTE_FUNCTION_RETURN
2490 /* If we promoted this return value, make the proper SUBREG. TARGET
2491 might be const0_rtx here, so be careful. */
2492 if (GET_CODE (target) == REG
2493 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2494 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2496 tree type = TREE_TYPE (exp);
2497 int unsignedp = TREE_UNSIGNED (type);
2499 /* If we don't promote as expected, something is wrong. */
2500 if (GET_MODE (target)
2501 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2502 abort ();
2504 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2505 SUBREG_PROMOTED_VAR_P (target) = 1;
2506 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2508 #endif
2510 /* If size of args is variable or this was a constructor call for a stack
2511 argument, restore saved stack-pointer value. */
2513 if (old_stack_level)
2515 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2516 pending_stack_adjust = old_pending_adj;
2517 #ifdef ACCUMULATE_OUTGOING_ARGS
2518 stack_arg_under_construction = old_stack_arg_under_construction;
2519 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2520 stack_usage_map = initial_stack_usage_map;
2521 #endif
2523 #ifdef ACCUMULATE_OUTGOING_ARGS
2524 else
2526 #ifdef REG_PARM_STACK_SPACE
2527 if (save_area)
2528 restore_fixed_argument_area (save_area, argblock,
2529 high_to_save, low_to_save);
2530 #endif
2532 /* If we saved any argument areas, restore them. */
2533 for (i = 0; i < num_actuals; i++)
2534 if (args[i].save_area)
2536 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2537 rtx stack_area
2538 = gen_rtx_MEM (save_mode,
2539 memory_address (save_mode,
2540 XEXP (args[i].stack_slot, 0)));
2542 if (save_mode != BLKmode)
2543 emit_move_insn (stack_area, args[i].save_area);
2544 else
2545 emit_block_move (stack_area, validize_mem (args[i].save_area),
2546 GEN_INT (args[i].size.constant),
2547 PARM_BOUNDARY / BITS_PER_UNIT);
2550 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2551 stack_usage_map = initial_stack_usage_map;
2553 #endif
2555 /* If this was alloca, record the new stack level for nonlocal gotos.
2556 Check for the handler slots since we might not have a save area
2557 for non-local gotos. */
2559 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2560 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2562 pop_temp_slots ();
2564 /* Free up storage we no longer need. */
2565 for (i = 0; i < num_actuals; ++i)
2566 if (args[i].aligned_regs)
2567 free (args[i].aligned_regs);
2569 return target;
2572 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2573 (emitting the queue unless NO_QUEUE is nonzero),
2574 for a value of mode OUTMODE,
2575 with NARGS different arguments, passed as alternating rtx values
2576 and machine_modes to convert them to.
2577 The rtx values should have been passed through protect_from_queue already.
2579 NO_QUEUE will be true if and only if the library call is a `const' call
2580 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2581 to the variable is_const in expand_call.
2583 NO_QUEUE must be true for const calls, because if it isn't, then
2584 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2585 and will be lost if the libcall sequence is optimized away.
2587 NO_QUEUE must be false for non-const calls, because if it isn't, the
2588 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2589 optimized. For instance, the instruction scheduler may incorrectly
2590 move memory references across the non-const call. */
2592 void
2593 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2594 int nargs, ...))
2596 #ifndef ANSI_PROTOTYPES
2597 rtx orgfun;
2598 int no_queue;
2599 enum machine_mode outmode;
2600 int nargs;
2601 #endif
2602 va_list p;
2603 /* Total size in bytes of all the stack-parms scanned so far. */
2604 struct args_size args_size;
2605 /* Size of arguments before any adjustments (such as rounding). */
2606 struct args_size original_args_size;
2607 register int argnum;
2608 rtx fun;
2609 int inc;
2610 int count;
2611 struct args_size alignment_pad;
2612 rtx argblock = 0;
2613 CUMULATIVE_ARGS args_so_far;
2614 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2615 struct args_size offset; struct args_size size; rtx save_area; };
2616 struct arg *argvec;
2617 int old_inhibit_defer_pop = inhibit_defer_pop;
2618 rtx call_fusage = 0;
2619 int reg_parm_stack_space = 0;
2620 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2621 /* Define the boundary of the register parm stack space that needs to be
2622 saved, if any. */
2623 int low_to_save = -1, high_to_save = 0;
2624 rtx save_area = 0; /* Place that it is saved */
2625 #endif
2627 #ifdef ACCUMULATE_OUTGOING_ARGS
2628 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2629 char *initial_stack_usage_map = stack_usage_map;
2630 int needed;
2631 #endif
2633 #ifdef REG_PARM_STACK_SPACE
2634 /* Size of the stack reserved for parameter registers. */
2635 #ifdef MAYBE_REG_PARM_STACK_SPACE
2636 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2637 #else
2638 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2639 #endif
2640 #endif
2642 VA_START (p, nargs);
2644 #ifndef ANSI_PROTOTYPES
2645 orgfun = va_arg (p, rtx);
2646 no_queue = va_arg (p, int);
2647 outmode = va_arg (p, enum machine_mode);
2648 nargs = va_arg (p, int);
2649 #endif
2651 fun = orgfun;
2653 /* Copy all the libcall-arguments out of the varargs data
2654 and into a vector ARGVEC.
2656 Compute how to pass each argument. We only support a very small subset
2657 of the full argument passing conventions to limit complexity here since
2658 library functions shouldn't have many args. */
2660 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2661 bzero ((char *) argvec, nargs * sizeof (struct arg));
2664 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2666 args_size.constant = 0;
2667 args_size.var = 0;
2669 push_temp_slots ();
2671 for (count = 0; count < nargs; count++)
2673 rtx val = va_arg (p, rtx);
2674 enum machine_mode mode = va_arg (p, enum machine_mode);
2676 /* We cannot convert the arg value to the mode the library wants here;
2677 must do it earlier where we know the signedness of the arg. */
2678 if (mode == BLKmode
2679 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2680 abort ();
2682 /* On some machines, there's no way to pass a float to a library fcn.
2683 Pass it as a double instead. */
2684 #ifdef LIBGCC_NEEDS_DOUBLE
2685 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2686 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2687 #endif
2689 /* There's no need to call protect_from_queue, because
2690 either emit_move_insn or emit_push_insn will do that. */
2692 /* Make sure it is a reasonable operand for a move or push insn. */
2693 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2694 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2695 val = force_operand (val, NULL_RTX);
2697 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2698 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2700 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2701 be viewed as just an efficiency improvement. */
2702 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2703 emit_move_insn (slot, val);
2704 val = force_operand (XEXP (slot, 0), NULL_RTX);
2705 mode = Pmode;
2707 #endif
2709 argvec[count].value = val;
2710 argvec[count].mode = mode;
2712 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2713 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2714 abort ();
2715 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2716 argvec[count].partial
2717 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2718 #else
2719 argvec[count].partial = 0;
2720 #endif
2722 locate_and_pad_parm (mode, NULL_TREE,
2723 argvec[count].reg && argvec[count].partial == 0,
2724 NULL_TREE, &args_size, &argvec[count].offset,
2725 &argvec[count].size, &alignment_pad);
2727 if (argvec[count].size.var)
2728 abort ();
2730 if (reg_parm_stack_space == 0 && argvec[count].partial)
2731 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2733 if (argvec[count].reg == 0 || argvec[count].partial != 0
2734 || reg_parm_stack_space > 0)
2735 args_size.constant += argvec[count].size.constant;
2737 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2739 va_end (p);
2741 #ifdef FINAL_REG_PARM_STACK_SPACE
2742 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2743 args_size.var);
2744 #endif
2746 /* If this machine requires an external definition for library
2747 functions, write one out. */
2748 assemble_external_libcall (fun);
2750 original_args_size = args_size;
2751 #ifdef PREFERRED_STACK_BOUNDARY
2752 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2753 / STACK_BYTES) * STACK_BYTES);
2754 #endif
2756 args_size.constant = MAX (args_size.constant,
2757 reg_parm_stack_space);
2759 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2760 args_size.constant -= reg_parm_stack_space;
2761 #endif
2763 if (args_size.constant > current_function_outgoing_args_size)
2764 current_function_outgoing_args_size = args_size.constant;
2766 #ifdef ACCUMULATE_OUTGOING_ARGS
2767 /* Since the stack pointer will never be pushed, it is possible for
2768 the evaluation of a parm to clobber something we have already
2769 written to the stack. Since most function calls on RISC machines
2770 do not use the stack, this is uncommon, but must work correctly.
2772 Therefore, we save any area of the stack that was already written
2773 and that we are using. Here we set up to do this by making a new
2774 stack usage map from the old one.
2776 Another approach might be to try to reorder the argument
2777 evaluations to avoid this conflicting stack usage. */
2779 needed = args_size.constant;
2781 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2782 /* Since we will be writing into the entire argument area, the
2783 map must be allocated for its entire size, not just the part that
2784 is the responsibility of the caller. */
2785 needed += reg_parm_stack_space;
2786 #endif
2788 #ifdef ARGS_GROW_DOWNWARD
2789 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2790 needed + 1);
2791 #else
2792 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2793 needed);
2794 #endif
2795 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2797 if (initial_highest_arg_in_use)
2798 bcopy (initial_stack_usage_map, stack_usage_map,
2799 initial_highest_arg_in_use);
2801 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2802 bzero (&stack_usage_map[initial_highest_arg_in_use],
2803 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2804 needed = 0;
2806 /* The address of the outgoing argument list must not be copied to a
2807 register here, because argblock would be left pointing to the
2808 wrong place after the call to allocate_dynamic_stack_space below.
2811 argblock = virtual_outgoing_args_rtx;
2812 #else /* not ACCUMULATE_OUTGOING_ARGS */
2813 #ifndef PUSH_ROUNDING
2814 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2815 #endif
2816 #endif
2818 #ifdef PUSH_ARGS_REVERSED
2819 #ifdef PREFERRED_STACK_BOUNDARY
2820 /* If we push args individually in reverse order, perform stack alignment
2821 before the first push (the last arg). */
2822 if (argblock == 0)
2823 anti_adjust_stack (GEN_INT (args_size.constant
2824 - original_args_size.constant));
2825 #endif
2826 #endif
2828 #ifdef PUSH_ARGS_REVERSED
2829 inc = -1;
2830 argnum = nargs - 1;
2831 #else
2832 inc = 1;
2833 argnum = 0;
2834 #endif
2836 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2837 /* The argument list is the property of the called routine and it
2838 may clobber it. If the fixed area has been used for previous
2839 parameters, we must save and restore it.
2841 Here we compute the boundary of the area that needs to be saved, if any. */
2843 #ifdef ARGS_GROW_DOWNWARD
2844 for (count = 0; count < reg_parm_stack_space + 1; count++)
2845 #else
2846 for (count = 0; count < reg_parm_stack_space; count++)
2847 #endif
2849 if (count >= highest_outgoing_arg_in_use
2850 || stack_usage_map[count] == 0)
2851 continue;
2853 if (low_to_save == -1)
2854 low_to_save = count;
2856 high_to_save = count;
2859 if (low_to_save >= 0)
2861 int num_to_save = high_to_save - low_to_save + 1;
2862 enum machine_mode save_mode
2863 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2864 rtx stack_area;
2866 /* If we don't have the required alignment, must do this in BLKmode. */
2867 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2868 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2869 save_mode = BLKmode;
2871 #ifdef ARGS_GROW_DOWNWARD
2872 stack_area = gen_rtx_MEM (save_mode,
2873 memory_address (save_mode,
2874 plus_constant (argblock,
2875 - high_to_save)));
2876 #else
2877 stack_area = gen_rtx_MEM (save_mode,
2878 memory_address (save_mode,
2879 plus_constant (argblock,
2880 low_to_save)));
2881 #endif
2882 if (save_mode == BLKmode)
2884 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2885 emit_block_move (validize_mem (save_area), stack_area,
2886 GEN_INT (num_to_save),
2887 PARM_BOUNDARY / BITS_PER_UNIT);
2889 else
2891 save_area = gen_reg_rtx (save_mode);
2892 emit_move_insn (save_area, stack_area);
2895 #endif
2897 /* Push the args that need to be pushed. */
2899 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2900 are to be pushed. */
2901 for (count = 0; count < nargs; count++, argnum += inc)
2903 register enum machine_mode mode = argvec[argnum].mode;
2904 register rtx val = argvec[argnum].value;
2905 rtx reg = argvec[argnum].reg;
2906 int partial = argvec[argnum].partial;
2907 #ifdef ACCUMULATE_OUTGOING_ARGS
2908 int lower_bound, upper_bound, i;
2909 #endif
2911 if (! (reg != 0 && partial == 0))
2913 #ifdef ACCUMULATE_OUTGOING_ARGS
2914 /* If this is being stored into a pre-allocated, fixed-size, stack
2915 area, save any previous data at that location. */
2917 #ifdef ARGS_GROW_DOWNWARD
2918 /* stack_slot is negative, but we want to index stack_usage_map
2919 with positive values. */
2920 upper_bound = -argvec[argnum].offset.constant + 1;
2921 lower_bound = upper_bound - argvec[argnum].size.constant;
2922 #else
2923 lower_bound = argvec[argnum].offset.constant;
2924 upper_bound = lower_bound + argvec[argnum].size.constant;
2925 #endif
2927 for (i = lower_bound; i < upper_bound; i++)
2928 if (stack_usage_map[i]
2929 /* Don't store things in the fixed argument area at this point;
2930 it has already been saved. */
2931 && i > reg_parm_stack_space)
2932 break;
2934 if (i != upper_bound)
2936 /* We need to make a save area. See what mode we can make it. */
2937 enum machine_mode save_mode
2938 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2939 MODE_INT, 1);
2940 rtx stack_area
2941 = gen_rtx_MEM
2942 (save_mode,
2943 memory_address
2944 (save_mode,
2945 plus_constant (argblock,
2946 argvec[argnum].offset.constant)));
2948 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2949 emit_move_insn (argvec[argnum].save_area, stack_area);
2951 #endif
2952 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2953 argblock, GEN_INT (argvec[argnum].offset.constant),
2954 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
2956 #ifdef ACCUMULATE_OUTGOING_ARGS
2957 /* Now mark the segment we just used. */
2958 for (i = lower_bound; i < upper_bound; i++)
2959 stack_usage_map[i] = 1;
2960 #endif
2962 NO_DEFER_POP;
2966 #ifndef PUSH_ARGS_REVERSED
2967 #ifdef PREFERRED_STACK_BOUNDARY
2968 /* If we pushed args in forward order, perform stack alignment
2969 after pushing the last arg. */
2970 if (argblock == 0)
2971 anti_adjust_stack (GEN_INT (args_size.constant
2972 - original_args_size.constant));
2973 #endif
2974 #endif
2976 #ifdef PUSH_ARGS_REVERSED
2977 argnum = nargs - 1;
2978 #else
2979 argnum = 0;
2980 #endif
2982 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2984 /* Now load any reg parms into their regs. */
2986 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2987 are to be pushed. */
2988 for (count = 0; count < nargs; count++, argnum += inc)
2990 register rtx val = argvec[argnum].value;
2991 rtx reg = argvec[argnum].reg;
2992 int partial = argvec[argnum].partial;
2994 if (reg != 0 && partial == 0)
2995 emit_move_insn (reg, val);
2996 NO_DEFER_POP;
2999 /* For version 1.37, try deleting this entirely. */
3000 if (! no_queue)
3001 emit_queue ();
3003 /* Any regs containing parms remain in use through the call. */
3004 for (count = 0; count < nargs; count++)
3005 if (argvec[count].reg != 0)
3006 use_reg (&call_fusage, argvec[count].reg);
3008 /* Don't allow popping to be deferred, since then
3009 cse'ing of library calls could delete a call and leave the pop. */
3010 NO_DEFER_POP;
3012 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3013 will set inhibit_defer_pop to that value. */
3015 /* The return type is needed to decide how many bytes the function pops.
3016 Signedness plays no role in that, so for simplicity, we pretend it's
3017 always signed. We also assume that the list of arguments passed has
3018 no impact, so we pretend it is unknown. */
3020 emit_call_1 (fun,
3021 get_identifier (XSTR (orgfun, 0)),
3022 build_function_type (outmode == VOIDmode ? void_type_node
3023 : type_for_mode (outmode, 0), NULL_TREE),
3024 original_args_size.constant, args_size.constant, 0,
3025 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3026 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
3027 old_inhibit_defer_pop + 1, call_fusage, no_queue);
3029 pop_temp_slots ();
3031 /* Now restore inhibit_defer_pop to its actual original value. */
3032 OK_DEFER_POP;
3034 #ifdef ACCUMULATE_OUTGOING_ARGS
3035 #ifdef REG_PARM_STACK_SPACE
3036 if (save_area)
3038 enum machine_mode save_mode = GET_MODE (save_area);
3039 #ifdef ARGS_GROW_DOWNWARD
3040 rtx stack_area
3041 = gen_rtx_MEM (save_mode,
3042 memory_address (save_mode,
3043 plus_constant (argblock,
3044 - high_to_save)));
3045 #else
3046 rtx stack_area
3047 = gen_rtx_MEM (save_mode,
3048 memory_address (save_mode,
3049 plus_constant (argblock, low_to_save)));
3050 #endif
3052 if (save_mode != BLKmode)
3053 emit_move_insn (stack_area, save_area);
3054 else
3055 emit_block_move (stack_area, validize_mem (save_area),
3056 GEN_INT (high_to_save - low_to_save + 1),
3057 PARM_BOUNDARY / BITS_PER_UNIT);
3059 #endif
3061 /* If we saved any argument areas, restore them. */
3062 for (count = 0; count < nargs; count++)
3063 if (argvec[count].save_area)
3065 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3066 rtx stack_area
3067 = gen_rtx_MEM (save_mode,
3068 memory_address
3069 (save_mode,
3070 plus_constant (argblock,
3071 argvec[count].offset.constant)));
3073 emit_move_insn (stack_area, argvec[count].save_area);
3076 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3077 stack_usage_map = initial_stack_usage_map;
3078 #endif
3081 /* Like emit_library_call except that an extra argument, VALUE,
3082 comes second and says where to store the result.
3083 (If VALUE is zero, this function chooses a convenient way
3084 to return the value.)
3086 This function returns an rtx for where the value is to be found.
3087 If VALUE is nonzero, VALUE is returned. */
3090 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3091 enum machine_mode outmode, int nargs, ...))
3093 #ifndef ANSI_PROTOTYPES
3094 rtx orgfun;
3095 rtx value;
3096 int no_queue;
3097 enum machine_mode outmode;
3098 int nargs;
3099 #endif
3100 va_list p;
3101 /* Total size in bytes of all the stack-parms scanned so far. */
3102 struct args_size args_size;
3103 /* Size of arguments before any adjustments (such as rounding). */
3104 struct args_size original_args_size;
3105 register int argnum;
3106 rtx fun;
3107 int inc;
3108 int count;
3109 struct args_size alignment_pad;
3110 rtx argblock = 0;
3111 CUMULATIVE_ARGS args_so_far;
3112 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3113 struct args_size offset; struct args_size size; rtx save_area; };
3114 struct arg *argvec;
3115 int old_inhibit_defer_pop = inhibit_defer_pop;
3116 rtx call_fusage = 0;
3117 rtx mem_value = 0;
3118 int pcc_struct_value = 0;
3119 int struct_value_size = 0;
3120 int is_const;
3121 int reg_parm_stack_space = 0;
3122 #ifdef ACCUMULATE_OUTGOING_ARGS
3123 int needed;
3124 #endif
3126 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3127 /* Define the boundary of the register parm stack space that needs to be
3128 save, if any. */
3129 int low_to_save = -1, high_to_save = 0;
3130 rtx save_area = 0; /* Place that it is saved */
3131 #endif
3133 #ifdef ACCUMULATE_OUTGOING_ARGS
3134 /* Size of the stack reserved for parameter registers. */
3135 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3136 char *initial_stack_usage_map = stack_usage_map;
3137 #endif
3139 #ifdef REG_PARM_STACK_SPACE
3140 #ifdef MAYBE_REG_PARM_STACK_SPACE
3141 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3142 #else
3143 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3144 #endif
3145 #endif
3147 VA_START (p, nargs);
3149 #ifndef ANSI_PROTOTYPES
3150 orgfun = va_arg (p, rtx);
3151 value = va_arg (p, rtx);
3152 no_queue = va_arg (p, int);
3153 outmode = va_arg (p, enum machine_mode);
3154 nargs = va_arg (p, int);
3155 #endif
3157 is_const = no_queue;
3158 fun = orgfun;
3160 /* If this kind of value comes back in memory,
3161 decide where in memory it should come back. */
3162 if (aggregate_value_p (type_for_mode (outmode, 0)))
3164 #ifdef PCC_STATIC_STRUCT_RETURN
3165 rtx pointer_reg
3166 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3167 0, 0);
3168 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3169 pcc_struct_value = 1;
3170 if (value == 0)
3171 value = gen_reg_rtx (outmode);
3172 #else /* not PCC_STATIC_STRUCT_RETURN */
3173 struct_value_size = GET_MODE_SIZE (outmode);
3174 if (value != 0 && GET_CODE (value) == MEM)
3175 mem_value = value;
3176 else
3177 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3178 #endif
3180 /* This call returns a big structure. */
3181 is_const = 0;
3184 /* ??? Unfinished: must pass the memory address as an argument. */
3186 /* Copy all the libcall-arguments out of the varargs data
3187 and into a vector ARGVEC.
3189 Compute how to pass each argument. We only support a very small subset
3190 of the full argument passing conventions to limit complexity here since
3191 library functions shouldn't have many args. */
3193 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3194 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3196 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3198 args_size.constant = 0;
3199 args_size.var = 0;
3201 count = 0;
3203 push_temp_slots ();
3205 /* If there's a structure value address to be passed,
3206 either pass it in the special place, or pass it as an extra argument. */
3207 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3209 rtx addr = XEXP (mem_value, 0);
3210 nargs++;
3212 /* Make sure it is a reasonable operand for a move or push insn. */
3213 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3214 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3215 addr = force_operand (addr, NULL_RTX);
3217 argvec[count].value = addr;
3218 argvec[count].mode = Pmode;
3219 argvec[count].partial = 0;
3221 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3222 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3223 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3224 abort ();
3225 #endif
3227 locate_and_pad_parm (Pmode, NULL_TREE,
3228 argvec[count].reg && argvec[count].partial == 0,
3229 NULL_TREE, &args_size, &argvec[count].offset,
3230 &argvec[count].size, &alignment_pad);
3233 if (argvec[count].reg == 0 || argvec[count].partial != 0
3234 || reg_parm_stack_space > 0)
3235 args_size.constant += argvec[count].size.constant;
3237 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3239 count++;
3242 for (; count < nargs; count++)
3244 rtx val = va_arg (p, rtx);
3245 enum machine_mode mode = va_arg (p, enum machine_mode);
3247 /* We cannot convert the arg value to the mode the library wants here;
3248 must do it earlier where we know the signedness of the arg. */
3249 if (mode == BLKmode
3250 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3251 abort ();
3253 /* On some machines, there's no way to pass a float to a library fcn.
3254 Pass it as a double instead. */
3255 #ifdef LIBGCC_NEEDS_DOUBLE
3256 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3257 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3258 #endif
3260 /* There's no need to call protect_from_queue, because
3261 either emit_move_insn or emit_push_insn will do that. */
3263 /* Make sure it is a reasonable operand for a move or push insn. */
3264 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3265 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3266 val = force_operand (val, NULL_RTX);
3268 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3269 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3271 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3272 be viewed as just an efficiency improvement. */
3273 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3274 emit_move_insn (slot, val);
3275 val = XEXP (slot, 0);
3276 mode = Pmode;
3278 #endif
3280 argvec[count].value = val;
3281 argvec[count].mode = mode;
3283 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3284 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3285 abort ();
3286 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3287 argvec[count].partial
3288 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3289 #else
3290 argvec[count].partial = 0;
3291 #endif
3293 locate_and_pad_parm (mode, NULL_TREE,
3294 argvec[count].reg && argvec[count].partial == 0,
3295 NULL_TREE, &args_size, &argvec[count].offset,
3296 &argvec[count].size, &alignment_pad);
3298 if (argvec[count].size.var)
3299 abort ();
3301 if (reg_parm_stack_space == 0 && argvec[count].partial)
3302 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3304 if (argvec[count].reg == 0 || argvec[count].partial != 0
3305 || reg_parm_stack_space > 0)
3306 args_size.constant += argvec[count].size.constant;
3308 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3310 va_end (p);
3312 #ifdef FINAL_REG_PARM_STACK_SPACE
3313 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3314 args_size.var);
3315 #endif
3316 /* If this machine requires an external definition for library
3317 functions, write one out. */
3318 assemble_external_libcall (fun);
3320 original_args_size = args_size;
3321 #ifdef PREFERRED_STACK_BOUNDARY
3322 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3323 / STACK_BYTES) * STACK_BYTES);
3324 #endif
3326 args_size.constant = MAX (args_size.constant,
3327 reg_parm_stack_space);
3329 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3330 args_size.constant -= reg_parm_stack_space;
3331 #endif
3333 if (args_size.constant > current_function_outgoing_args_size)
3334 current_function_outgoing_args_size = args_size.constant;
3336 #ifdef ACCUMULATE_OUTGOING_ARGS
3337 /* Since the stack pointer will never be pushed, it is possible for
3338 the evaluation of a parm to clobber something we have already
3339 written to the stack. Since most function calls on RISC machines
3340 do not use the stack, this is uncommon, but must work correctly.
3342 Therefore, we save any area of the stack that was already written
3343 and that we are using. Here we set up to do this by making a new
3344 stack usage map from the old one.
3346 Another approach might be to try to reorder the argument
3347 evaluations to avoid this conflicting stack usage. */
3349 needed = args_size.constant;
3351 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3352 /* Since we will be writing into the entire argument area, the
3353 map must be allocated for its entire size, not just the part that
3354 is the responsibility of the caller. */
3355 needed += reg_parm_stack_space;
3356 #endif
3358 #ifdef ARGS_GROW_DOWNWARD
3359 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3360 needed + 1);
3361 #else
3362 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3363 needed);
3364 #endif
3365 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3367 if (initial_highest_arg_in_use)
3368 bcopy (initial_stack_usage_map, stack_usage_map,
3369 initial_highest_arg_in_use);
3371 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3372 bzero (&stack_usage_map[initial_highest_arg_in_use],
3373 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3374 needed = 0;
3376 /* The address of the outgoing argument list must not be copied to a
3377 register here, because argblock would be left pointing to the
3378 wrong place after the call to allocate_dynamic_stack_space below.
3381 argblock = virtual_outgoing_args_rtx;
3382 #else /* not ACCUMULATE_OUTGOING_ARGS */
3383 #ifndef PUSH_ROUNDING
3384 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3385 #endif
3386 #endif
3388 #ifdef PUSH_ARGS_REVERSED
3389 #ifdef PREFERRED_STACK_BOUNDARY
3390 /* If we push args individually in reverse order, perform stack alignment
3391 before the first push (the last arg). */
3392 if (argblock == 0)
3393 anti_adjust_stack (GEN_INT (args_size.constant
3394 - original_args_size.constant));
3395 #endif
3396 #endif
3398 #ifdef PUSH_ARGS_REVERSED
3399 inc = -1;
3400 argnum = nargs - 1;
3401 #else
3402 inc = 1;
3403 argnum = 0;
3404 #endif
3406 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3407 /* The argument list is the property of the called routine and it
3408 may clobber it. If the fixed area has been used for previous
3409 parameters, we must save and restore it.
3411 Here we compute the boundary of the that needs to be saved, if any. */
3413 #ifdef ARGS_GROW_DOWNWARD
3414 for (count = 0; count < reg_parm_stack_space + 1; count++)
3415 #else
3416 for (count = 0; count < reg_parm_stack_space; count++)
3417 #endif
3419 if (count >= highest_outgoing_arg_in_use
3420 || stack_usage_map[count] == 0)
3421 continue;
3423 if (low_to_save == -1)
3424 low_to_save = count;
3426 high_to_save = count;
3429 if (low_to_save >= 0)
3431 int num_to_save = high_to_save - low_to_save + 1;
3432 enum machine_mode save_mode
3433 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3434 rtx stack_area;
3436 /* If we don't have the required alignment, must do this in BLKmode. */
3437 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3438 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3439 save_mode = BLKmode;
3441 #ifdef ARGS_GROW_DOWNWARD
3442 stack_area = gen_rtx_MEM (save_mode,
3443 memory_address (save_mode,
3444 plus_constant (argblock,
3445 - high_to_save)));
3446 #else
3447 stack_area = gen_rtx_MEM (save_mode,
3448 memory_address (save_mode,
3449 plus_constant (argblock,
3450 low_to_save)));
3451 #endif
3452 if (save_mode == BLKmode)
3454 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3455 emit_block_move (validize_mem (save_area), stack_area,
3456 GEN_INT (num_to_save),
3457 PARM_BOUNDARY / BITS_PER_UNIT);
3459 else
3461 save_area = gen_reg_rtx (save_mode);
3462 emit_move_insn (save_area, stack_area);
3465 #endif
3467 /* Push the args that need to be pushed. */
3469 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3470 are to be pushed. */
3471 for (count = 0; count < nargs; count++, argnum += inc)
3473 register enum machine_mode mode = argvec[argnum].mode;
3474 register rtx val = argvec[argnum].value;
3475 rtx reg = argvec[argnum].reg;
3476 int partial = argvec[argnum].partial;
3477 #ifdef ACCUMULATE_OUTGOING_ARGS
3478 int lower_bound, upper_bound, i;
3479 #endif
3481 if (! (reg != 0 && partial == 0))
3483 #ifdef ACCUMULATE_OUTGOING_ARGS
3484 /* If this is being stored into a pre-allocated, fixed-size, stack
3485 area, save any previous data at that location. */
3487 #ifdef ARGS_GROW_DOWNWARD
3488 /* stack_slot is negative, but we want to index stack_usage_map
3489 with positive values. */
3490 upper_bound = -argvec[argnum].offset.constant + 1;
3491 lower_bound = upper_bound - argvec[argnum].size.constant;
3492 #else
3493 lower_bound = argvec[argnum].offset.constant;
3494 upper_bound = lower_bound + argvec[argnum].size.constant;
3495 #endif
3497 for (i = lower_bound; i < upper_bound; i++)
3498 if (stack_usage_map[i]
3499 /* Don't store things in the fixed argument area at this point;
3500 it has already been saved. */
3501 && i > reg_parm_stack_space)
3502 break;
3504 if (i != upper_bound)
3506 /* We need to make a save area. See what mode we can make it. */
3507 enum machine_mode save_mode
3508 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3509 MODE_INT, 1);
3510 rtx stack_area
3511 = gen_rtx_MEM
3512 (save_mode,
3513 memory_address
3514 (save_mode,
3515 plus_constant (argblock,
3516 argvec[argnum].offset.constant)));
3517 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3519 emit_move_insn (argvec[argnum].save_area, stack_area);
3521 #endif
3522 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3523 argblock, GEN_INT (argvec[argnum].offset.constant),
3524 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3526 #ifdef ACCUMULATE_OUTGOING_ARGS
3527 /* Now mark the segment we just used. */
3528 for (i = lower_bound; i < upper_bound; i++)
3529 stack_usage_map[i] = 1;
3530 #endif
3532 NO_DEFER_POP;
3536 #ifndef PUSH_ARGS_REVERSED
3537 #ifdef PREFERRED_STACK_BOUNDARY
3538 /* If we pushed args in forward order, perform stack alignment
3539 after pushing the last arg. */
3540 if (argblock == 0)
3541 anti_adjust_stack (GEN_INT (args_size.constant
3542 - original_args_size.constant));
3543 #endif
3544 #endif
3546 #ifdef PUSH_ARGS_REVERSED
3547 argnum = nargs - 1;
3548 #else
3549 argnum = 0;
3550 #endif
3552 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3554 /* Now load any reg parms into their regs. */
3556 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3557 are to be pushed. */
3558 for (count = 0; count < nargs; count++, argnum += inc)
3560 register rtx val = argvec[argnum].value;
3561 rtx reg = argvec[argnum].reg;
3562 int partial = argvec[argnum].partial;
3564 if (reg != 0 && partial == 0)
3565 emit_move_insn (reg, val);
3566 NO_DEFER_POP;
3569 #if 0
3570 /* For version 1.37, try deleting this entirely. */
3571 if (! no_queue)
3572 emit_queue ();
3573 #endif
3575 /* Any regs containing parms remain in use through the call. */
3576 for (count = 0; count < nargs; count++)
3577 if (argvec[count].reg != 0)
3578 use_reg (&call_fusage, argvec[count].reg);
3580 /* Pass the function the address in which to return a structure value. */
3581 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3583 emit_move_insn (struct_value_rtx,
3584 force_reg (Pmode,
3585 force_operand (XEXP (mem_value, 0),
3586 NULL_RTX)));
3587 if (GET_CODE (struct_value_rtx) == REG)
3588 use_reg (&call_fusage, struct_value_rtx);
3591 /* Don't allow popping to be deferred, since then
3592 cse'ing of library calls could delete a call and leave the pop. */
3593 NO_DEFER_POP;
3595 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3596 will set inhibit_defer_pop to that value. */
3597 /* See the comment in emit_library_call about the function type we build
3598 and pass here. */
3600 emit_call_1 (fun,
3601 get_identifier (XSTR (orgfun, 0)),
3602 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3603 original_args_size.constant, args_size.constant,
3604 struct_value_size,
3605 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3606 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3607 old_inhibit_defer_pop + 1, call_fusage, is_const);
3609 /* Now restore inhibit_defer_pop to its actual original value. */
3610 OK_DEFER_POP;
3612 pop_temp_slots ();
3614 /* Copy the value to the right place. */
3615 if (outmode != VOIDmode)
3617 if (mem_value)
3619 if (value == 0)
3620 value = mem_value;
3621 if (value != mem_value)
3622 emit_move_insn (value, mem_value);
3624 else if (value != 0)
3625 emit_move_insn (value, hard_libcall_value (outmode));
3626 else
3627 value = hard_libcall_value (outmode);
3630 #ifdef ACCUMULATE_OUTGOING_ARGS
3631 #ifdef REG_PARM_STACK_SPACE
3632 if (save_area)
3634 enum machine_mode save_mode = GET_MODE (save_area);
3635 #ifdef ARGS_GROW_DOWNWARD
3636 rtx stack_area
3637 = gen_rtx_MEM (save_mode,
3638 memory_address (save_mode,
3639 plus_constant (argblock,
3640 - high_to_save)));
3641 #else
3642 rtx stack_area
3643 = gen_rtx_MEM (save_mode,
3644 memory_address (save_mode,
3645 plus_constant (argblock, low_to_save)));
3646 #endif
3647 if (save_mode != BLKmode)
3648 emit_move_insn (stack_area, save_area);
3649 else
3650 emit_block_move (stack_area, validize_mem (save_area),
3651 GEN_INT (high_to_save - low_to_save + 1),
3652 PARM_BOUNDARY / BITS_PER_UNIT);
3654 #endif
3656 /* If we saved any argument areas, restore them. */
3657 for (count = 0; count < nargs; count++)
3658 if (argvec[count].save_area)
3660 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3661 rtx stack_area
3662 = gen_rtx_MEM (save_mode,
3663 memory_address
3664 (save_mode,
3665 plus_constant (argblock,
3666 argvec[count].offset.constant)));
3668 emit_move_insn (stack_area, argvec[count].save_area);
3671 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3672 stack_usage_map = initial_stack_usage_map;
3673 #endif
3675 return value;
3678 #if 0
3679 /* Return an rtx which represents a suitable home on the stack
3680 given TYPE, the type of the argument looking for a home.
3681 This is called only for BLKmode arguments.
3683 SIZE is the size needed for this target.
3684 ARGS_ADDR is the address of the bottom of the argument block for this call.
3685 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3686 if this machine uses push insns. */
3688 static rtx
3689 target_for_arg (type, size, args_addr, offset)
3690 tree type;
3691 rtx size;
3692 rtx args_addr;
3693 struct args_size offset;
3695 rtx target;
3696 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3698 /* We do not call memory_address if possible,
3699 because we want to address as close to the stack
3700 as possible. For non-variable sized arguments,
3701 this will be stack-pointer relative addressing. */
3702 if (GET_CODE (offset_rtx) == CONST_INT)
3703 target = plus_constant (args_addr, INTVAL (offset_rtx));
3704 else
3706 /* I have no idea how to guarantee that this
3707 will work in the presence of register parameters. */
3708 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3709 target = memory_address (QImode, target);
3712 return gen_rtx_MEM (BLKmode, target);
3714 #endif
3716 /* Store a single argument for a function call
3717 into the register or memory area where it must be passed.
3718 *ARG describes the argument value and where to pass it.
3720 ARGBLOCK is the address of the stack-block for all the arguments,
3721 or 0 on a machine where arguments are pushed individually.
3723 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3724 so must be careful about how the stack is used.
3726 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3727 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3728 that we need not worry about saving and restoring the stack.
3730 FNDECL is the declaration of the function we are calling. */
3732 static void
3733 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3734 reg_parm_stack_space)
3735 struct arg_data *arg;
3736 rtx argblock;
3737 int may_be_alloca;
3738 int variable_size ATTRIBUTE_UNUSED;
3739 int reg_parm_stack_space;
3741 register tree pval = arg->tree_value;
3742 rtx reg = 0;
3743 int partial = 0;
3744 int used = 0;
3745 #ifdef ACCUMULATE_OUTGOING_ARGS
3746 int i, lower_bound = 0, upper_bound = 0;
3747 #endif
3749 if (TREE_CODE (pval) == ERROR_MARK)
3750 return;
3752 /* Push a new temporary level for any temporaries we make for
3753 this argument. */
3754 push_temp_slots ();
3756 #ifdef ACCUMULATE_OUTGOING_ARGS
3757 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3758 save any previous data at that location. */
3759 if (argblock && ! variable_size && arg->stack)
3761 #ifdef ARGS_GROW_DOWNWARD
3762 /* stack_slot is negative, but we want to index stack_usage_map
3763 with positive values. */
3764 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3765 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3766 else
3767 upper_bound = 0;
3769 lower_bound = upper_bound - arg->size.constant;
3770 #else
3771 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3772 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3773 else
3774 lower_bound = 0;
3776 upper_bound = lower_bound + arg->size.constant;
3777 #endif
3779 for (i = lower_bound; i < upper_bound; i++)
3780 if (stack_usage_map[i]
3781 /* Don't store things in the fixed argument area at this point;
3782 it has already been saved. */
3783 && i > reg_parm_stack_space)
3784 break;
3786 if (i != upper_bound)
3788 /* We need to make a save area. See what mode we can make it. */
3789 enum machine_mode save_mode
3790 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3791 rtx stack_area
3792 = gen_rtx_MEM (save_mode,
3793 memory_address (save_mode,
3794 XEXP (arg->stack_slot, 0)));
3796 if (save_mode == BLKmode)
3798 arg->save_area = assign_stack_temp (BLKmode,
3799 arg->size.constant, 0);
3800 MEM_SET_IN_STRUCT_P (arg->save_area,
3801 AGGREGATE_TYPE_P (TREE_TYPE
3802 (arg->tree_value)));
3803 preserve_temp_slots (arg->save_area);
3804 emit_block_move (validize_mem (arg->save_area), stack_area,
3805 GEN_INT (arg->size.constant),
3806 PARM_BOUNDARY / BITS_PER_UNIT);
3808 else
3810 arg->save_area = gen_reg_rtx (save_mode);
3811 emit_move_insn (arg->save_area, stack_area);
3816 /* Now that we have saved any slots that will be overwritten by this
3817 store, mark all slots this store will use. We must do this before
3818 we actually expand the argument since the expansion itself may
3819 trigger library calls which might need to use the same stack slot. */
3820 if (argblock && ! variable_size && arg->stack)
3821 for (i = lower_bound; i < upper_bound; i++)
3822 stack_usage_map[i] = 1;
3823 #endif
3825 /* If this isn't going to be placed on both the stack and in registers,
3826 set up the register and number of words. */
3827 if (! arg->pass_on_stack)
3828 reg = arg->reg, partial = arg->partial;
3830 if (reg != 0 && partial == 0)
3831 /* Being passed entirely in a register. We shouldn't be called in
3832 this case. */
3833 abort ();
3835 /* If this arg needs special alignment, don't load the registers
3836 here. */
3837 if (arg->n_aligned_regs != 0)
3838 reg = 0;
3840 /* If this is being passed partially in a register, we can't evaluate
3841 it directly into its stack slot. Otherwise, we can. */
3842 if (arg->value == 0)
3844 #ifdef ACCUMULATE_OUTGOING_ARGS
3845 /* stack_arg_under_construction is nonzero if a function argument is
3846 being evaluated directly into the outgoing argument list and
3847 expand_call must take special action to preserve the argument list
3848 if it is called recursively.
3850 For scalar function arguments stack_usage_map is sufficient to
3851 determine which stack slots must be saved and restored. Scalar
3852 arguments in general have pass_on_stack == 0.
3854 If this argument is initialized by a function which takes the
3855 address of the argument (a C++ constructor or a C function
3856 returning a BLKmode structure), then stack_usage_map is
3857 insufficient and expand_call must push the stack around the
3858 function call. Such arguments have pass_on_stack == 1.
3860 Note that it is always safe to set stack_arg_under_construction,
3861 but this generates suboptimal code if set when not needed. */
3863 if (arg->pass_on_stack)
3864 stack_arg_under_construction++;
3865 #endif
3866 arg->value = expand_expr (pval,
3867 (partial
3868 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3869 ? NULL_RTX : arg->stack,
3870 VOIDmode, 0);
3872 /* If we are promoting object (or for any other reason) the mode
3873 doesn't agree, convert the mode. */
3875 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3876 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3877 arg->value, arg->unsignedp);
3879 #ifdef ACCUMULATE_OUTGOING_ARGS
3880 if (arg->pass_on_stack)
3881 stack_arg_under_construction--;
3882 #endif
3885 /* Don't allow anything left on stack from computation
3886 of argument to alloca. */
3887 if (may_be_alloca)
3888 do_pending_stack_adjust ();
3890 if (arg->value == arg->stack)
3892 /* If the value is already in the stack slot, we are done. */
3893 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3895 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3896 XEXP (arg->stack, 0), Pmode,
3897 ARGS_SIZE_RTX (arg->size),
3898 TYPE_MODE (sizetype),
3899 GEN_INT (MEMORY_USE_RW),
3900 TYPE_MODE (integer_type_node));
3903 else if (arg->mode != BLKmode)
3905 register int size;
3907 /* Argument is a scalar, not entirely passed in registers.
3908 (If part is passed in registers, arg->partial says how much
3909 and emit_push_insn will take care of putting it there.)
3911 Push it, and if its size is less than the
3912 amount of space allocated to it,
3913 also bump stack pointer by the additional space.
3914 Note that in C the default argument promotions
3915 will prevent such mismatches. */
3917 size = GET_MODE_SIZE (arg->mode);
3918 /* Compute how much space the push instruction will push.
3919 On many machines, pushing a byte will advance the stack
3920 pointer by a halfword. */
3921 #ifdef PUSH_ROUNDING
3922 size = PUSH_ROUNDING (size);
3923 #endif
3924 used = size;
3926 /* Compute how much space the argument should get:
3927 round up to a multiple of the alignment for arguments. */
3928 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3929 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3930 / (PARM_BOUNDARY / BITS_PER_UNIT))
3931 * (PARM_BOUNDARY / BITS_PER_UNIT));
3933 /* This isn't already where we want it on the stack, so put it there.
3934 This can either be done with push or copy insns. */
3935 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3936 partial, reg, used - size, argblock,
3937 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
3938 ARGS_SIZE_RTX (arg->alignment_pad));
3941 else
3943 /* BLKmode, at least partly to be pushed. */
3945 register int excess;
3946 rtx size_rtx;
3948 /* Pushing a nonscalar.
3949 If part is passed in registers, PARTIAL says how much
3950 and emit_push_insn will take care of putting it there. */
3952 /* Round its size up to a multiple
3953 of the allocation unit for arguments. */
3955 if (arg->size.var != 0)
3957 excess = 0;
3958 size_rtx = ARGS_SIZE_RTX (arg->size);
3960 else
3962 /* PUSH_ROUNDING has no effect on us, because
3963 emit_push_insn for BLKmode is careful to avoid it. */
3964 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3965 + partial * UNITS_PER_WORD);
3966 size_rtx = expr_size (pval);
3969 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3970 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3971 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3972 reg_parm_stack_space,
3973 ARGS_SIZE_RTX (arg->alignment_pad));
3977 /* Unless this is a partially-in-register argument, the argument is now
3978 in the stack.
3980 ??? Note that this can change arg->value from arg->stack to
3981 arg->stack_slot and it matters when they are not the same.
3982 It isn't totally clear that this is correct in all cases. */
3983 if (partial == 0)
3984 arg->value = arg->stack_slot;
3986 /* Once we have pushed something, pops can't safely
3987 be deferred during the rest of the arguments. */
3988 NO_DEFER_POP;
3990 /* ANSI doesn't require a sequence point here,
3991 but PCC has one, so this will avoid some problems. */
3992 emit_queue ();
3994 /* Free any temporary slots made in processing this argument. Show
3995 that we might have taken the address of something and pushed that
3996 as an operand. */
3997 preserve_temp_slots (NULL_RTX);
3998 free_temp_slots ();
3999 pop_temp_slots ();