gcc/calls.c
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "expr.h"
27 #include "regs.h"
28 #include "insn-flags.h"
29 #include "toplev.h"
30 #include "output.h"
32 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
33 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
34 #endif
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
42 #ifdef PUSH_ROUNDING
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #endif
48 #endif
50 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
51 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
53 /* Data structure and subroutines used within expand_call. */
55 struct arg_data
57 /* Tree node for this argument. */
58 tree tree_value;
59 /* Mode for value; TYPE_MODE unless promoted. */
60 enum machine_mode mode;
61 /* Current RTL value for argument, or 0 if it isn't precomputed. */
62 rtx value;
63 /* Initially-computed RTL value for argument; only for const functions. */
64 rtx initial_value;
65 /* Register to pass this argument in, 0 if passed on stack, or a
66 PARALLEL if the arg is to be copied into multiple non-contiguous
67 registers. */
68 rtx reg;
69 /* If REG was promoted from the actual mode of the argument expression,
70 indicates whether the promotion is sign- or zero-extended. */
71 int unsignedp;
72 /* Number of registers to use. 0 means put the whole arg in registers.
73 Also 0 if not passed in registers. */
74 int partial;
75 /* Non-zero if argument must be passed on stack.
76 Note that some arguments may be passed on the stack
77 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
78 pass_on_stack identifies arguments that *cannot* go in registers. */
79 int pass_on_stack;
80 /* Offset of this argument from beginning of stack-args. */
81 struct args_size offset;
82 /* Similar, but offset to the start of the stack slot. Different from
83 OFFSET if this arg pads downward. */
84 struct args_size slot_offset;
85 /* Size of this argument on the stack, rounded up for any padding it gets;
86 parts of the argument passed in registers do not count.
87 If REG_PARM_STACK_SPACE is defined, then register parms
88 are counted here as well. */
89 struct args_size size;
90 /* Location on the stack at which parameter should be stored. The store
91 has already been done if STACK == VALUE. */
92 rtx stack;
93 /* Location on the stack of the start of this argument slot. This can
94 differ from STACK if this arg pads downward. This location is known
95 to be aligned to FUNCTION_ARG_BOUNDARY. */
96 rtx stack_slot;
97 #ifdef ACCUMULATE_OUTGOING_ARGS
98 /* Place that this stack area has been saved, if needed. */
99 rtx save_area;
100 #endif
101 /* If an argument's alignment does not permit direct copying into registers,
102 copy in smaller-sized pieces into pseudos. These are stored in a
103 block pointed to by this field. The next field says how many
104 word-sized pseudos we made. */
105 rtx *aligned_regs;
106 int n_aligned_regs;
109 #ifdef ACCUMULATE_OUTGOING_ARGS
110 /* A vector of one char per byte of stack space. A byte is non-zero if
111 the corresponding stack location has been used.
112 This vector is used to prevent a function call within an argument from
113 clobbering any stack already set up. */
114 static char *stack_usage_map;
116 /* Size of STACK_USAGE_MAP. */
117 static int highest_outgoing_arg_in_use;
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
124 int stack_arg_under_construction;
125 #endif
127 static int calls_function PROTO ((tree, int));
128 static int calls_function_1 PROTO ((tree, int));
129 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
130 HOST_WIDE_INT, rtx, rtx,
131 int, rtx, int));
132 static void special_function_p PROTO ((char *, tree, int *, int *,
133 int *, int *));
134 static void precompute_register_parameters PROTO ((int, struct arg_data *,
135 int *));
136 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
137 int));
138 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
139 int));
140 static int finalize_must_preallocate PROTO ((int, int,
141 struct arg_data *,
142 struct args_size *));
143 static void precompute_arguments PROTO ((int, int, int,
144 struct arg_data *,
145 struct args_size *));
146 static int compute_argument_block_size PROTO ((int,
147 struct args_size *));
148 static void initialize_argument_information PROTO ((int,
149 struct arg_data *,
150 struct args_size *,
151 int, tree, tree,
152 CUMULATIVE_ARGS *,
153 int, rtx *, int *,
154 int *, int *));
155 static void compute_argument_addresses PROTO ((struct arg_data *,
156 rtx, int));
157 static rtx rtx_for_function_call PROTO ((tree, tree));
158 static void load_register_parameters PROTO ((struct arg_data *,
159 int, rtx *));
161 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
162 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
163 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
164 #endif
166 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
167 `alloca'.
169 If WHICH is 0, return 1 if EXP contains a call to any function.
170 Actually, we only need to return 1 if evaluating EXP would require pushing
171 arguments on the stack, but that is too difficult to compute, so we just
172 assume any function call might require the stack. */
174 static tree calls_function_save_exprs;
176 static int
177 calls_function (exp, which)
178 tree exp;
179 int which;
181 int val;
182 calls_function_save_exprs = 0;
183 val = calls_function_1 (exp, which);
184 calls_function_save_exprs = 0;
185 return val;
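/* Editorial note, not part of the original source: a usage sketch of the
   two query modes described above.  precompute_arguments () later in this
   file applies both forms to each argument expression, roughly

       if (calls_function (args[i].tree_value, 1))
         ... the argument may call alloca, so precompute it ...

       if (calls_function (args[i].tree_value, 0))
         ... the argument may call something that pushes stack args ...

   The exact guarding conditions (args_size, must_preallocate) appear in
   precompute_arguments itself.  */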
188 static int
189 calls_function_1 (exp, which)
190 tree exp;
191 int which;
193 register int i;
194 enum tree_code code = TREE_CODE (exp);
195 int type = TREE_CODE_CLASS (code);
196 int length = tree_code_length[(int) code];
198 /* If this code is language-specific, we don't know what it will do. */
199 if ((int) code >= NUM_TREE_CODES)
200 return 1;
202 /* Only expressions and references can contain calls. */
203 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
204 && type != 'b')
205 return 0;
207 switch (code)
209 case CALL_EXPR:
210 if (which == 0)
211 return 1;
212 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
213 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
214 == FUNCTION_DECL))
216 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
218 if ((DECL_BUILT_IN (fndecl)
219 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
220 || (DECL_SAVED_INSNS (fndecl)
221 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
222 & FUNCTION_FLAGS_CALLS_ALLOCA)))
223 return 1;
226 /* Third operand is RTL. */
227 length = 2;
228 break;
230 case SAVE_EXPR:
231 if (SAVE_EXPR_RTL (exp) != 0)
232 return 0;
233 if (value_member (exp, calls_function_save_exprs))
234 return 0;
235 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
236 calls_function_save_exprs);
237 return (TREE_OPERAND (exp, 0) != 0
238 && calls_function_1 (TREE_OPERAND (exp, 0), which));
240 case BLOCK:
242 register tree local;
244 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
245 if (DECL_INITIAL (local) != 0
246 && calls_function_1 (DECL_INITIAL (local), which))
247 return 1;
250 register tree subblock;
252 for (subblock = BLOCK_SUBBLOCKS (exp);
253 subblock;
254 subblock = TREE_CHAIN (subblock))
255 if (calls_function_1 (subblock, which))
256 return 1;
258 return 0;
260 case METHOD_CALL_EXPR:
261 length = 3;
262 break;
264 case WITH_CLEANUP_EXPR:
265 length = 1;
266 break;
268 case RTL_EXPR:
269 return 0;
271 default:
272 break;
275 for (i = 0; i < length; i++)
276 if (TREE_OPERAND (exp, i) != 0
277 && calls_function_1 (TREE_OPERAND (exp, i), which))
278 return 1;
280 return 0;
283 /* Force FUNEXP into a form suitable for the address of a CALL,
284 and return that as an rtx. Also load the static chain register
285 if FNDECL is a nested function.
287 CALL_FUSAGE points to a variable holding the prospective
288 CALL_INSN_FUNCTION_USAGE information. */
291 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
292 rtx funexp;
293 tree fndecl;
294 rtx *call_fusage;
295 int reg_parm_seen;
297 rtx static_chain_value = 0;
299 funexp = protect_from_queue (funexp, 0);
301 if (fndecl != 0)
302 /* Get possible static chain value for nested function in C. */
303 static_chain_value = lookup_static_chain (fndecl);
305 /* Make a valid memory address and copy constants through pseudo-regs,
306 but not for a constant address if -fno-function-cse. */
307 if (GET_CODE (funexp) != SYMBOL_REF)
308 /* If we are using registers for parameters, force the
309 function address into a register now. */
310 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
311 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
312 : memory_address (FUNCTION_MODE, funexp));
313 else
315 #ifndef NO_FUNCTION_CSE
316 if (optimize && ! flag_no_function_cse)
317 #ifdef NO_RECURSIVE_FUNCTION_CSE
318 if (fndecl != current_function_decl)
319 #endif
320 funexp = force_reg (Pmode, funexp);
321 #endif
324 if (static_chain_value != 0)
326 emit_move_insn (static_chain_rtx, static_chain_value);
328 if (GET_CODE (static_chain_rtx) == REG)
329 use_reg (call_fusage, static_chain_rtx);
332 return funexp;
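/* Editorial note, not part of the original source: with optimization and
   without -fno-function-cse, the force_reg above copies even a constant
   SYMBOL_REF function address into a pseudo, so that two calls such as

       f (1);
       f (2);

   can share one load of the address of f; -fno-function-cse keeps the
   SYMBOL_REF in each call insn and forgoes that sharing.  */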
335 /* Generate instructions to call function FUNEXP,
336 and optionally pop the results.
337 The CALL_INSN is the first insn generated.
339 FNDECL is the declaration node of the function. This is given to the
340 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
342 FUNTYPE is the data type of the function. This is given to the macro
343 RETURN_POPS_ARGS to determine whether this function pops its own args.
344 We used to allow an identifier for library functions, but that doesn't
345 work when the return type is an aggregate type and the calling convention
346 says that the pointer to this aggregate is to be popped by the callee.
348 STACK_SIZE is the number of bytes of arguments on the stack,
349 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
350 This is both to put into the call insn and
351 to generate explicit popping code if necessary.
353 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
354 It is zero if this call doesn't want a structure value.
356 NEXT_ARG_REG is the rtx that results from executing
357 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function.
374 IS_CONST is true if this is a `const' call. */
376 static void
377 emit_call_1 (funexp, fndecl, funtype, stack_size, struct_value_size,
378 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
379 is_const)
380 rtx funexp;
381 tree fndecl ATTRIBUTE_UNUSED;
382 tree funtype ATTRIBUTE_UNUSED;
383 HOST_WIDE_INT stack_size;
384 HOST_WIDE_INT struct_value_size;
385 rtx next_arg_reg;
386 rtx valreg;
387 int old_inhibit_defer_pop;
388 rtx call_fusage;
389 int is_const;
391 rtx stack_size_rtx = GEN_INT (stack_size);
392 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
393 rtx call_insn;
394 #ifndef ACCUMULATE_OUTGOING_ARGS
395 int already_popped = 0;
396 #endif
398 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
399 and we don't want to load it into a register as an optimization,
400 because prepare_call_address already did it if it should be done. */
401 if (GET_CODE (funexp) != SYMBOL_REF)
402 funexp = memory_address (FUNCTION_MODE, funexp);
404 #ifndef ACCUMULATE_OUTGOING_ARGS
405 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
406 if (HAVE_call_pop && HAVE_call_value_pop
407 && (RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0
408 || stack_size == 0))
410 rtx n_pop = GEN_INT (RETURN_POPS_ARGS (fndecl, funtype, stack_size));
411 rtx pat;
413 /* If this subroutine pops its own args, record that in the call insn
414 if possible, for the sake of frame pointer elimination. */
416 if (valreg)
417 pat = gen_call_value_pop (valreg,
418 gen_rtx_MEM (FUNCTION_MODE, funexp),
419 stack_size_rtx, next_arg_reg, n_pop);
420 else
421 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
422 stack_size_rtx, next_arg_reg, n_pop);
424 emit_call_insn (pat);
425 already_popped = 1;
427 else
428 #endif
429 #endif
431 #if defined (HAVE_call) && defined (HAVE_call_value)
432 if (HAVE_call && HAVE_call_value)
434 if (valreg)
435 emit_call_insn (gen_call_value (valreg,
436 gen_rtx_MEM (FUNCTION_MODE, funexp),
437 stack_size_rtx, next_arg_reg,
438 NULL_RTX));
439 else
440 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
441 stack_size_rtx, next_arg_reg,
442 struct_value_size_rtx));
444 else
445 #endif
446 abort ();
448 /* Find the CALL insn we just emitted. */
449 for (call_insn = get_last_insn ();
450 call_insn && GET_CODE (call_insn) != CALL_INSN;
451 call_insn = PREV_INSN (call_insn))
454 if (! call_insn)
455 abort ();
457 /* Put the register usage information on the CALL. If there is already
458 some usage information, put ours at the end. */
459 if (CALL_INSN_FUNCTION_USAGE (call_insn))
461 rtx link;
463 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
464 link = XEXP (link, 1))
467 XEXP (link, 1) = call_fusage;
469 else
470 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
472 /* If this is a const call, then set the insn's unchanging bit. */
473 if (is_const)
474 CONST_CALL_P (call_insn) = 1;
476 /* Restore this now, so that we do defer pops for this call's args
477 if the context of the call as a whole permits. */
478 inhibit_defer_pop = old_inhibit_defer_pop;
480 #ifndef ACCUMULATE_OUTGOING_ARGS
481 /* If returning from the subroutine does not automatically pop the args,
482 we need an instruction to pop them sooner or later.
483 Perhaps do it now; perhaps just record how much space to pop later.
485 If returning from the subroutine does pop the args, indicate that the
486 stack pointer will be changed. */
488 if (stack_size != 0 && RETURN_POPS_ARGS (fndecl, funtype, stack_size) > 0)
490 if (!already_popped)
491 CALL_INSN_FUNCTION_USAGE (call_insn)
492 = gen_rtx_EXPR_LIST (VOIDmode,
493 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
494 CALL_INSN_FUNCTION_USAGE (call_insn));
495 stack_size -= RETURN_POPS_ARGS (fndecl, funtype, stack_size);
496 stack_size_rtx = GEN_INT (stack_size);
499 if (stack_size != 0)
501 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
502 pending_stack_adjust += stack_size;
503 else
504 adjust_stack (stack_size_rtx);
506 #endif
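/* Editorial note, not part of the original source: RETURN_POPS_ARGS is
   nonzero for conventions in which the callee removes its own stack
   arguments, for example an i386 declaration like

       extern int __attribute__ ((stdcall)) f (int, int);

   For such calls emit_call_1 prefers a call_pop/call_value_pop pattern
   when the target provides one; otherwise it records a clobber of
   stack_pointer_rtx on the call and subtracts the callee-popped bytes
   from STACK_SIZE so the caller does not pop them a second time.  */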
509 /* Determine if the function identified by NAME and FNDECL is one with
510 special properties we wish to know about.
512 For example, if the function might return more than one time (setjmp), then
513 set RETURNS_TWICE to a nonzero value.
515 Similarly, set IS_LONGJMP if the function is in the longjmp family.
517 Set IS_MALLOC for any of the standard memory allocation functions which
518 allocate from the heap.
520 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
521 space from the stack such as alloca. */
523 static void
524 special_function_p (name, fndecl, returns_twice, is_longjmp,
525 is_malloc, may_be_alloca)
526 char *name;
527 tree fndecl;
528 int *returns_twice;
529 int *is_longjmp;
530 int *is_malloc;
531 int *may_be_alloca;
533 *returns_twice = 0;
534 *is_longjmp = 0;
535 *is_malloc = 0;
536 *may_be_alloca = 0;
538 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
539 /* Exclude functions not at the file scope, or not `extern',
540 since they are not the magic functions we would otherwise
541 think they are. */
542 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
544 char *tname = name;
546 /* We assume that alloca will always be called by name. It
547 makes no sense to pass it as a pointer-to-function to
548 anything that does not understand its behavior. */
549 *may_be_alloca
550 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
551 && name[0] == 'a'
552 && ! strcmp (name, "alloca"))
553 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
554 && name[0] == '_'
555 && ! strcmp (name, "__builtin_alloca"))));
557 /* Disregard prefix _, __ or __x. */
558 if (name[0] == '_')
560 if (name[1] == '_' && name[2] == 'x')
561 tname += 3;
562 else if (name[1] == '_')
563 tname += 2;
564 else
565 tname += 1;
568 if (tname[0] == 's')
570 *returns_twice
571 = ((tname[1] == 'e'
572 && (! strcmp (tname, "setjmp")
573 || ! strcmp (tname, "setjmp_syscall")))
574 || (tname[1] == 'i'
575 && ! strcmp (tname, "sigsetjmp"))
576 || (tname[1] == 'a'
577 && ! strcmp (tname, "savectx")));
578 if (tname[1] == 'i'
579 && ! strcmp (tname, "siglongjmp"))
580 *is_longjmp = 1;
582 else if ((tname[0] == 'q' && tname[1] == 's'
583 && ! strcmp (tname, "qsetjmp"))
584 || (tname[0] == 'v' && tname[1] == 'f'
585 && ! strcmp (tname, "vfork")))
586 *returns_twice = 1;
588 else if (tname[0] == 'l' && tname[1] == 'o'
589 && ! strcmp (tname, "longjmp"))
590 *is_longjmp = 1;
591 /* XXX should have "malloc" attribute on functions instead
592 of recognizing them by name. */
593 else if (! strcmp (tname, "malloc")
594 || ! strcmp (tname, "calloc")
595 || ! strcmp (tname, "realloc")
596 /* Note use of NAME rather than TNAME here. These functions
597 are only reserved when preceded with __. */
598 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
599 || ! strcmp (name, "__nw") /* mangled __builtin_new */
600 || ! strcmp (name, "__builtin_new")
601 || ! strcmp (name, "__builtin_vec_new"))
602 *is_malloc = 1;
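/* Editorial example, not part of the original source: the kind of user
   code the name checks above are aimed at.  Given the usual file-scope
   extern declarations from the headers,

       #include <setjmp.h>
       #include <stdlib.h>

       jmp_buf env;
       char *p;

       void f (void)
       {
         if (setjmp (env) == 0)
           p = malloc (16);
       }

       void g (void)
       {
         longjmp (env, 1);
       }

   makes special_function_p set *RETURNS_TWICE for the setjmp call,
   *IS_MALLOC for the malloc call and *IS_LONGJMP for the longjmp call.  */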
606 /* Precompute all register parameters as described by ARGS, storing values
607 into fields within the ARGS array.
609 NUM_ACTUALS indicates the total number of elements in the ARGS array.
611 Set REG_PARM_SEEN if we encounter a register parameter. */
613 static void
614 precompute_register_parameters (num_actuals, args, reg_parm_seen)
615 int num_actuals;
616 struct arg_data *args;
617 int *reg_parm_seen;
619 int i;
621 *reg_parm_seen = 0;
623 for (i = 0; i < num_actuals; i++)
624 if (args[i].reg != 0 && ! args[i].pass_on_stack)
626 *reg_parm_seen = 1;
628 if (args[i].value == 0)
630 push_temp_slots ();
631 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
632 VOIDmode, 0);
633 preserve_temp_slots (args[i].value);
634 pop_temp_slots ();
636 /* ANSI doesn't require a sequence point here,
637 but PCC has one, so this will avoid some problems. */
638 emit_queue ();
641 /* If we are to promote the function arg to a wider mode,
642 do it now. */
644 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
645 args[i].value
646 = convert_modes (args[i].mode,
647 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
648 args[i].value, args[i].unsignedp);
650 /* If the value is expensive, and we are inside an appropriately
651 short loop, put the value into a pseudo and then put the pseudo
652 into the hard reg.
654 For small register classes, also do this if this call uses
655 register parameters. This is to avoid reload conflicts while
656 loading the parameter registers. */
658 if ((! (GET_CODE (args[i].value) == REG
659 || (GET_CODE (args[i].value) == SUBREG
660 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
661 && args[i].mode != BLKmode
662 && rtx_cost (args[i].value, SET) > 2
663 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
664 || preserve_subexpressions_p ()))
665 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
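/* Editorial note, not part of the original source: the convert_modes call
   above completes the argument promotion chosen (under
   PROMOTE_FUNCTION_ARGS) in initialize_argument_information.  On a target
   that promotes sub-word scalars, a call such as

       extern void f (short);
       f (x);

   gives the argument a widened args[i].mode (typically the word-sized
   integer mode), and its value is sign- or zero-extended here according
   to args[i].unsignedp before being moved into the argument register.  */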
669 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
671 /* The argument list is the property of the called routine, which
672 may clobber it. If the fixed area has been used for previous
673 parameters, we must save and restore it. */
674 static rtx
675 save_fixed_argument_area (reg_parm_stack_space, argblock,
676 low_to_save, high_to_save)
677 int reg_parm_stack_space;
678 rtx argblock;
679 int *low_to_save;
680 int *high_to_save;
682 int i;
683 rtx save_area = NULL_RTX;
685 /* Compute the boundary of the area that needs to be saved, if any. */
686 #ifdef ARGS_GROW_DOWNWARD
687 for (i = 0; i < reg_parm_stack_space + 1; i++)
688 #else
689 for (i = 0; i < reg_parm_stack_space; i++)
690 #endif
692 if (i >= highest_outgoing_arg_in_use
693 || stack_usage_map[i] == 0)
694 continue;
696 if (*low_to_save == -1)
697 *low_to_save = i;
699 *high_to_save = i;
702 if (*low_to_save >= 0)
704 int num_to_save = *high_to_save - *low_to_save + 1;
705 enum machine_mode save_mode
706 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
707 rtx stack_area;
709 /* If we don't have the required alignment, must do this in BLKmode. */
710 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
711 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
712 save_mode = BLKmode;
714 #ifdef ARGS_GROW_DOWNWARD
715 stack_area = gen_rtx_MEM (save_mode,
716 memory_address (save_mode,
717 plus_constant (argblock,
718 - *high_to_save)));
719 #else
720 stack_area = gen_rtx_MEM (save_mode,
721 memory_address (save_mode,
722 plus_constant (argblock,
723 *low_to_save)));
724 #endif
725 if (save_mode == BLKmode)
727 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
728 emit_block_move (validize_mem (save_area), stack_area,
729 GEN_INT (num_to_save),
730 PARM_BOUNDARY / BITS_PER_UNIT);
732 else
734 save_area = gen_reg_rtx (save_mode);
735 emit_move_insn (save_area, stack_area);
738 return save_area;
741 static void
742 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
743 rtx save_area;
744 rtx argblock;
745 int high_to_save;
746 int low_to_save;
748 enum machine_mode save_mode = GET_MODE (save_area);
749 #ifdef ARGS_GROW_DOWNWARD
750 rtx stack_area
751 = gen_rtx_MEM (save_mode,
752 memory_address (save_mode,
753 plus_constant (argblock,
754 - high_to_save)));
755 #else
756 rtx stack_area
757 = gen_rtx_MEM (save_mode,
758 memory_address (save_mode,
759 plus_constant (argblock,
760 low_to_save)));
761 #endif
763 if (save_mode != BLKmode)
764 emit_move_insn (stack_area, save_area);
765 else
766 emit_block_move (stack_area, validize_mem (save_area),
767 GEN_INT (high_to_save - low_to_save + 1),
768 PARM_BOUNDARY / BITS_PER_UNIT);
770 #endif
772 /* If any elements in ARGS refer to parameters that are to be passed in
773 registers, but not in memory, and whose alignment does not permit a
774 direct copy into registers, copy the values into a group of pseudos
775 which we will later copy into the appropriate hard registers.
777 Pseudos for each unaligned argument will be stored into the array
778 args[argnum].aligned_regs. The caller is responsible for deallocating
779 the aligned_regs array if it is nonzero. */
781 static void
782 store_unaligned_arguments_into_pseudos (args, num_actuals)
783 struct arg_data *args;
784 int num_actuals;
786 int i, j;
788 for (i = 0; i < num_actuals; i++)
789 if (args[i].reg != 0 && ! args[i].pass_on_stack
790 && args[i].mode == BLKmode
791 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
792 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
794 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
795 int big_endian_correction = 0;
797 args[i].n_aligned_regs
798 = args[i].partial ? args[i].partial
799 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
801 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
802 * args[i].n_aligned_regs);
804 /* Structures smaller than a word are aligned to the least
805 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
806 this means we must skip the empty high order bytes when
807 calculating the bit offset. */
808 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
809 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
811 for (j = 0; j < args[i].n_aligned_regs; j++)
813 rtx reg = gen_reg_rtx (word_mode);
814 rtx word = operand_subword_force (args[i].value, j, BLKmode);
815 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
816 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
818 args[i].aligned_regs[j] = reg;
820 /* There is no need to restrict this code to loading items
821 in TYPE_ALIGN sized hunks. The bitfield instructions can
822 load up entire word sized registers efficiently.
824 ??? This may not be needed anymore.
825 We used to emit a clobber here, but that doesn't let later
826 passes optimize the instructions we emit. By storing 0 into
827 the register, later passes know that the first AND to zero out the
828 bitfield being set in the register is unnecessary. The store
829 of 0 will be deleted as will at least the first AND. */
831 emit_move_insn (reg, const0_rtx);
833 bytes -= bitsize / BITS_PER_UNIT;
834 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
835 extract_bit_field (word, bitsize, 0, 1,
836 NULL_RTX, word_mode,
837 word_mode,
838 bitalign / BITS_PER_UNIT,
839 BITS_PER_WORD),
840 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
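/* Editorial example, not part of the original source: a case that can
   reach the loop above on a target that passes small aggregates in
   registers.  An argument of type

       struct s { char c[6]; };

   has BLKmode and a TYPE_ALIGN of 8 bits, which is below BITS_PER_WORD
   on a 32-bit target, so its bytes are assembled into word_mode pseudos
   with extract_bit_field/store_bit_field instead of whole-word moves.  */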
845 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
846 ACTPARMS.
848 NUM_ACTUALS is the total number of parameters.
850 N_NAMED_ARGS is the total number of named arguments.
852 FNDECL is the tree node for the target of this call (if known).
854 ARGS_SO_FAR holds state needed by the target to know where to place
855 the next argument.
857 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
858 for arguments which are passed in registers.
860 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
861 and may be modified by this routine.
863 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
864 flags which may be modified by this routine. */
866 static void
867 initialize_argument_information (num_actuals, args, args_size, n_named_args,
868 actparms, fndecl, args_so_far,
869 reg_parm_stack_space, old_stack_level,
870 old_pending_adj, must_preallocate, is_const)
871 int num_actuals;
872 struct arg_data *args;
873 struct args_size *args_size;
874 int n_named_args;
875 tree actparms;
876 tree fndecl;
877 CUMULATIVE_ARGS *args_so_far;
878 int reg_parm_stack_space;
879 rtx *old_stack_level;
880 int *old_pending_adj;
881 int *must_preallocate;
882 int *is_const;
884 /* 1 if scanning parms front to back, -1 if scanning back to front. */
885 int inc;
887 /* Count arg position in order args appear. */
888 int argpos;
890 int i;
891 tree p;
893 args_size->constant = 0;
894 args_size->var = 0;
896 /* In this loop, we consider args in the order they are written.
897 We fill up ARGS from the front or from the back if necessary
898 so that in any case the first arg to be pushed ends up at the front. */
900 #ifdef PUSH_ARGS_REVERSED
901 i = num_actuals - 1, inc = -1;
902 /* In this case, must reverse order of args
903 so that we compute and push the last arg first. */
904 #else
905 i = 0, inc = 1;
906 #endif
908 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
909 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
911 tree type = TREE_TYPE (TREE_VALUE (p));
912 int unsignedp;
913 enum machine_mode mode;
915 args[i].tree_value = TREE_VALUE (p);
917 /* Replace erroneous argument with constant zero. */
918 if (type == error_mark_node || TYPE_SIZE (type) == 0)
919 args[i].tree_value = integer_zero_node, type = integer_type_node;
921 /* If TYPE is a transparent union, pass things the way we would
922 pass the first field of the union. We have already verified that
923 the modes are the same. */
924 if (TYPE_TRANSPARENT_UNION (type))
925 type = TREE_TYPE (TYPE_FIELDS (type));
927 /* Decide where to pass this arg.
929 args[i].reg is nonzero if all or part is passed in registers.
931 args[i].partial is nonzero if part but not all is passed in registers,
932 and the exact value says how many words are passed in registers.
934 args[i].pass_on_stack is nonzero if the argument must at least be
935 computed on the stack. It may then be loaded back into registers
936 if args[i].reg is nonzero.
938 These decisions are driven by the FUNCTION_... macros and must agree
939 with those made by function.c. */
941 /* See if this argument should be passed by invisible reference. */
942 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
943 && contains_placeholder_p (TYPE_SIZE (type)))
944 || TREE_ADDRESSABLE (type)
945 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
946 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
947 type, argpos < n_named_args)
948 #endif
951 /* If we're compiling a thunk, pass through invisible
952 references instead of making a copy. */
953 if (current_function_is_thunk
954 #ifdef FUNCTION_ARG_CALLEE_COPIES
955 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
956 type, argpos < n_named_args)
957 /* If it's in a register, we must make a copy of it too. */
958 /* ??? Is this a sufficient test? Is there a better one? */
959 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
960 && REG_P (DECL_RTL (args[i].tree_value)))
961 && ! TREE_ADDRESSABLE (type))
962 #endif
965 /* C++ uses a TARGET_EXPR to indicate that we want to make a
966 new object from the argument. If we are passing by
967 invisible reference, the callee will do that for us, so we
968 can strip off the TARGET_EXPR. This is not always safe,
969 but it is safe in the only case where this is a useful
970 optimization; namely, when the argument is a plain object.
971 In that case, the frontend is just asking the backend to
972 make a bitwise copy of the argument. */
974 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
975 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
976 (args[i].tree_value, 1)))
977 == 'd')
978 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
979 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
981 args[i].tree_value = build1 (ADDR_EXPR,
982 build_pointer_type (type),
983 args[i].tree_value);
984 type = build_pointer_type (type);
986 else
988 /* We make a copy of the object and pass the address to the
989 function being called. */
990 rtx copy;
992 if (TYPE_SIZE (type) == 0
993 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
994 || (flag_stack_check && ! STACK_CHECK_BUILTIN
995 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
996 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
997 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
999 /* This is a variable-sized object. Make space on the stack
1000 for it. */
1001 rtx size_rtx = expr_size (TREE_VALUE (p));
1003 if (*old_stack_level == 0)
1005 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1006 *old_pending_adj = pending_stack_adjust;
1007 pending_stack_adjust = 0;
1010 copy = gen_rtx_MEM (BLKmode,
1011 allocate_dynamic_stack_space (size_rtx,
1012 NULL_RTX,
1013 TYPE_ALIGN (type)));
1015 else
1017 int size = int_size_in_bytes (type);
1018 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1021 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1023 store_expr (args[i].tree_value, copy, 0);
1024 *is_const = 0;
1026 args[i].tree_value = build1 (ADDR_EXPR,
1027 build_pointer_type (type),
1028 make_tree (type, copy));
1029 type = build_pointer_type (type);
1033 mode = TYPE_MODE (type);
1034 unsignedp = TREE_UNSIGNED (type);
1036 #ifdef PROMOTE_FUNCTION_ARGS
1037 mode = promote_mode (type, mode, &unsignedp, 1);
1038 #endif
1040 args[i].unsignedp = unsignedp;
1041 args[i].mode = mode;
1042 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1043 argpos < n_named_args);
1044 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1045 if (args[i].reg)
1046 args[i].partial
1047 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1048 argpos < n_named_args);
1049 #endif
1051 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1053 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1054 it means that we are to pass this arg in the register(s) designated
1055 by the PARALLEL, but also to pass it on the stack. */
1056 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1057 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1058 args[i].pass_on_stack = 1;
1060 /* If this is an addressable type, we must preallocate the stack
1061 since we must evaluate the object into its final location.
1063 If this is to be passed in both registers and the stack, it is simpler
1064 to preallocate. */
1065 if (TREE_ADDRESSABLE (type)
1066 || (args[i].pass_on_stack && args[i].reg != 0))
1067 *must_preallocate = 1;
1069 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1070 we cannot consider this function call constant. */
1071 if (TREE_ADDRESSABLE (type))
1072 *is_const = 0;
1074 /* Compute the stack-size of this argument. */
1075 if (args[i].reg == 0 || args[i].partial != 0
1076 || reg_parm_stack_space > 0
1077 || args[i].pass_on_stack)
1078 locate_and_pad_parm (mode, type,
1079 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1081 #else
1082 args[i].reg != 0,
1083 #endif
1084 fndecl, args_size, &args[i].offset,
1085 &args[i].size);
1087 #ifndef ARGS_GROW_DOWNWARD
1088 args[i].slot_offset = *args_size;
1089 #endif
1091 /* If a part of the arg was put into registers,
1092 don't include that part in the amount pushed. */
1093 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1094 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1095 / (PARM_BOUNDARY / BITS_PER_UNIT)
1096 * (PARM_BOUNDARY / BITS_PER_UNIT));
1098 /* Update ARGS_SIZE, the total stack space for args so far. */
1100 args_size->constant += args[i].size.constant;
1101 if (args[i].size.var)
1103 ADD_PARM_SIZE (*args_size, args[i].size.var);
1106 /* Since the slot offset points to the bottom of the slot,
1107 we must record it after incrementing if the args grow down. */
1108 #ifdef ARGS_GROW_DOWNWARD
1109 args[i].slot_offset = *args_size;
1111 args[i].slot_offset.constant = -args_size->constant;
1112 if (args_size->var)
1114 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1116 #endif
1118 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1119 have been used, etc. */
1121 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1122 argpos < n_named_args);
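/* Editorial example, not part of the original source: the
   pass-by-invisible-reference path above.  On a target whose
   FUNCTION_ARG_PASS_BY_REFERENCE accepts large aggregates, a call such as

       struct big { int a[64]; };
       extern void callee (struct big);
       void caller (struct big *p) { callee (*p); }

   ends up with args[i].tree_value rewritten to an ADDR_EXPR: the address
   of the object itself for thunks and callee-copied arguments, or the
   address of a fresh stack copy filled in with store_expr otherwise, and
   TYPE becomes the corresponding pointer type.  */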
1126 /* Update ARGS_SIZE to contain the total size for the argument block.
1127 Return the original constant component of the argument block's size.
1129 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1130 for arguments passed in registers. */
1132 static int
1133 compute_argument_block_size (reg_parm_stack_space, args_size)
1134 int reg_parm_stack_space;
1135 struct args_size *args_size;
1137 int unadjusted_args_size = args_size->constant;
1139 /* Compute the actual size of the argument block required. The variable
1140 and constant sizes must be combined, the size may have to be rounded,
1141 and there may be a minimum required size. */
1143 if (args_size->var)
1145 args_size->var = ARGS_SIZE_TREE (*args_size);
1146 args_size->constant = 0;
1148 #ifdef PREFERRED_STACK_BOUNDARY
1149 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1150 args_size->var = round_up (args_size->var, STACK_BYTES);
1151 #endif
1153 if (reg_parm_stack_space > 0)
1155 args_size->var
1156 = size_binop (MAX_EXPR, args_size->var,
1157 size_int (reg_parm_stack_space));
1159 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1160 /* The area corresponding to register parameters does not count in
1161 the size of the block we need. So make the adjustment. */
1162 args_size->var
1163 = size_binop (MINUS_EXPR, args_size->var,
1164 size_int (reg_parm_stack_space));
1165 #endif
1168 else
1170 #ifdef PREFERRED_STACK_BOUNDARY
1171 args_size->constant = (((args_size->constant + (STACK_BYTES - 1))
1172 / STACK_BYTES) * STACK_BYTES);
1173 #endif
1175 args_size->constant = MAX (args_size->constant,
1176 reg_parm_stack_space);
1178 #ifdef MAYBE_REG_PARM_STACK_SPACE
1179 if (reg_parm_stack_space == 0)
1180 args_size->constant = 0;
1181 #endif
1183 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1184 args_size->constant -= reg_parm_stack_space;
1185 #endif
1187 return unadjusted_args_size;
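/* Editorial note, not part of the original source: a worked instance of
   the constant-size rounding above.  With a PREFERRED_STACK_BOUNDARY of
   64 bits, STACK_BYTES is 8, so an args_size->constant of 22 becomes
   ((22 + 7) / 8) * 8 = 24 bytes, while the value returned as
   unadjusted_args_size is still the original 22.  */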
1190 /* Precompute parameters as needed for a function call.
1192 IS_CONST indicates the target function is a pure function.
1194 MUST_PREALLOCATE indicates that we must preallocate stack space for
1195 any stack arguments.
1197 NUM_ACTUALS is the number of arguments.
1199 ARGS is an array containing information for each argument; this routine
1200 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1202 ARGS_SIZE contains information about the size of the arg list. */
1204 static void
1205 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1206 int is_const;
1207 int must_preallocate;
1208 int num_actuals;
1209 struct arg_data *args;
1210 struct args_size *args_size;
1212 int i;
1214 /* If this function call is cse'able, precompute all the parameters.
1215 Note that if the parameter is constructed into a temporary, this will
1216 cause an additional copy because the parameter will be constructed
1217 into a temporary location and then copied into the outgoing arguments.
1218 If a parameter contains a call to alloca and this function uses the
1219 stack, precompute the parameter. */
1221 /* If we preallocated the stack space, and some arguments must be passed
1222 on the stack, then we must precompute any parameter which contains a
1223 function call which will store arguments on the stack.
1224 Otherwise, evaluating the parameter may clobber previous parameters
1225 which have already been stored into the stack. */
1227 for (i = 0; i < num_actuals; i++)
1228 if (is_const
1229 || ((args_size->var != 0 || args_size->constant != 0)
1230 && calls_function (args[i].tree_value, 1))
1231 || (must_preallocate
1232 && (args_size->var != 0 || args_size->constant != 0)
1233 && calls_function (args[i].tree_value, 0)))
1235 /* If this is an addressable type, we cannot pre-evaluate it. */
1236 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1237 abort ();
1239 push_temp_slots ();
1241 args[i].initial_value = args[i].value
1242 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1244 preserve_temp_slots (args[i].value);
1245 pop_temp_slots ();
1247 /* ANSI doesn't require a sequence point here,
1248 but PCC has one, so this will avoid some problems. */
1249 emit_queue ();
1251 args[i].initial_value = args[i].value
1252 = protect_from_queue (args[i].initial_value, 0);
1254 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1255 args[i].value
1256 = convert_modes (args[i].mode,
1257 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1258 args[i].value, args[i].unsignedp);
1262 /* Given the current state of MUST_PREALLOCATE and information about
1263 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1264 compute and return the final value for MUST_PREALLOCATE. */
1266 static int
1267 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1268 int must_preallocate;
1269 int num_actuals;
1270 struct arg_data *args;
1271 struct args_size *args_size;
1273 /* See if we have or want to preallocate stack space.
1275 If we would have to push a partially-in-regs parm
1276 before other stack parms, preallocate stack space instead.
1278 If the size of some parm is not a multiple of the required stack
1279 alignment, we must preallocate.
1281 If the total size of arguments that would otherwise create a copy in
1282 a temporary (such as a CALL) is more than half the total argument list
1283 size, preallocation is faster.
1285 Another reason to preallocate is if we have a machine (like the m88k)
1286 where stack alignment is required to be maintained between every
1287 pair of insns, not just when the call is made. However, we assume here
1288 that such machines either do not have push insns (and hence preallocation
1289 would occur anyway) or the problem is taken care of with
1290 PUSH_ROUNDING. */
1292 if (! must_preallocate)
1294 int partial_seen = 0;
1295 int copy_to_evaluate_size = 0;
1296 int i;
1298 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1300 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1301 partial_seen = 1;
1302 else if (partial_seen && args[i].reg == 0)
1303 must_preallocate = 1;
1305 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1306 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1307 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1308 || TREE_CODE (args[i].tree_value) == COND_EXPR
1309 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1310 copy_to_evaluate_size
1311 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1314 if (copy_to_evaluate_size * 2 >= args_size->constant
1315 && args_size->constant > 0)
1316 must_preallocate = 1;
1318 return must_preallocate;
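/* Editorial note, not part of the original source: a worked instance of
   the size heuristic above.  If a single BLKmode argument that is itself
   a CALL_EXPR occupies 32 bytes of a 48-byte argument block, then
   copy_to_evaluate_size * 2 = 64 >= 48, so must_preallocate becomes 1 and
   store_one_arg can evaluate that argument directly into its stack slot
   rather than into a temporary that would then be copied.  */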
1321 /* If we preallocated stack space, compute the address of each argument
1322 and store it into the ARGS array.
1324 We need not ensure it is a valid memory address here; it will be
1325 validized when it is used.
1327 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1329 static void
1330 compute_argument_addresses (args, argblock, num_actuals)
1331 struct arg_data *args;
1332 rtx argblock;
1333 int num_actuals;
1335 if (argblock)
1337 rtx arg_reg = argblock;
1338 int i, arg_offset = 0;
1340 if (GET_CODE (argblock) == PLUS)
1341 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1343 for (i = 0; i < num_actuals; i++)
1345 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1346 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1347 rtx addr;
1349 /* Skip this parm if it will not be passed on the stack. */
1350 if (! args[i].pass_on_stack && args[i].reg != 0)
1351 continue;
1353 if (GET_CODE (offset) == CONST_INT)
1354 addr = plus_constant (arg_reg, INTVAL (offset));
1355 else
1356 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1358 addr = plus_constant (addr, arg_offset);
1359 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1360 MEM_SET_IN_STRUCT_P
1361 (args[i].stack,
1362 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1364 if (GET_CODE (slot_offset) == CONST_INT)
1365 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1366 else
1367 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1369 addr = plus_constant (addr, arg_offset);
1370 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1375 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1376 in a call instruction.
1378 FNDECL is the tree node for the target function. For an indirect call
1379 FNDECL will be NULL_TREE.
1381 EXP is the CALL_EXPR for this call. */
1383 static rtx
1384 rtx_for_function_call (fndecl, exp)
1385 tree fndecl;
1386 tree exp;
1388 rtx funexp;
1390 /* Get the function to call, in the form of RTL. */
1391 if (fndecl)
1393 /* If this is the first use of the function, see if we need to
1394 make an external definition for it. */
1395 if (! TREE_USED (fndecl))
1397 assemble_external (fndecl);
1398 TREE_USED (fndecl) = 1;
1401 /* Get a SYMBOL_REF rtx for the function address. */
1402 funexp = XEXP (DECL_RTL (fndecl), 0);
1404 else
1405 /* Generate an rtx (probably a pseudo-register) for the address. */
1407 push_temp_slots ();
1408 funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1409 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1411 /* Check the function is executable. */
1412 if (current_function_check_memory_usage)
1413 emit_library_call (chkr_check_exec_libfunc, 1,
1414 VOIDmode, 1,
1415 funexp, ptr_mode);
1416 emit_queue ();
1418 return funexp;
1421 /* Do the register loads required for any wholly-register parms or any
1422 parms which are passed both on the stack and in a register. Their
1423 expressions were already evaluated.
1425 Mark all register-parms as living through the call, putting these USE
1426 insns in the CALL_INSN_FUNCTION_USAGE field. */
1428 static void
1429 load_register_parameters (args, num_actuals, call_fusage)
1430 struct arg_data *args;
1431 int num_actuals;
1432 rtx *call_fusage;
1434 int i, j;
1436 #ifdef LOAD_ARGS_REVERSED
1437 for (i = num_actuals - 1; i >= 0; i--)
1438 #else
1439 for (i = 0; i < num_actuals; i++)
1440 #endif
1442 rtx reg = args[i].reg;
1443 int partial = args[i].partial;
1444 int nregs;
1446 if (reg)
1448 /* Set to non-negative if we must move a word at a time, even if just
1449 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1450 we just use a normal move insn. This value can be zero if the
1451 argument is a zero size structure with no fields. */
1452 nregs = (partial ? partial
1453 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1454 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1455 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1456 : -1));
1458 /* Handle calls that pass values in multiple non-contiguous
1459 locations. The Irix 6 ABI has examples of this. */
1461 if (GET_CODE (reg) == PARALLEL)
1463 emit_group_load (reg, args[i].value,
1464 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1465 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1466 / BITS_PER_UNIT));
1469 /* If simple case, just do move. If normal partial, store_one_arg
1470 has already loaded the register for us. In all other cases,
1471 load the register(s) from memory. */
1473 else if (nregs == -1)
1474 emit_move_insn (reg, args[i].value);
1476 /* If we have pre-computed the values to put in the registers in
1477 the case of non-aligned structures, copy them in now. */
1479 else if (args[i].n_aligned_regs != 0)
1480 for (j = 0; j < args[i].n_aligned_regs; j++)
1481 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1482 args[i].aligned_regs[j]);
1484 else if (partial == 0 || args[i].pass_on_stack)
1485 move_block_to_reg (REGNO (reg),
1486 validize_mem (args[i].value), nregs,
1487 args[i].mode);
1489 /* Handle calls that pass values in multiple non-contiguous
1490 locations. The Irix 6 ABI has examples of this. */
1491 if (GET_CODE (reg) == PARALLEL)
1492 use_group_regs (call_fusage, reg);
1493 else if (nregs == -1)
1494 use_reg (call_fusage, reg);
1495 else
1496 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
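/* Editorial note, not part of the original source: the PARALLEL case
   above handles arguments split across non-contiguous hard registers.
   Under the Irix 6 (MIPS n32/n64) ABI mentioned in the comments, an
   argument such as

       struct d { double x; int y; };

   can have its double field placed in a floating-point register and the
   remainder in an integer register; FUNCTION_ARG describes that as a
   PARALLEL, and emit_group_load/use_group_regs above load and mark the
   individual pieces.  */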
1501 /* Generate all the code for a function call
1502 and return an rtx for its value.
1503 Store the value in TARGET (specified as an rtx) if convenient.
1504 If the value is stored in TARGET then TARGET is returned.
1505 If IGNORE is nonzero, then we ignore the value of the function call. */
1508 expand_call (exp, target, ignore)
1509 tree exp;
1510 rtx target;
1511 int ignore;
1513 /* List of actual parameters. */
1514 tree actparms = TREE_OPERAND (exp, 1);
1515 /* RTX for the function to be called. */
1516 rtx funexp;
1517 /* Data type of the function. */
1518 tree funtype;
1519 /* Declaration of the function being called,
1520 or 0 if the function is computed (not known by name). */
1521 tree fndecl = 0;
1522 char *name = 0;
1524 /* Register in which non-BLKmode value will be returned,
1525 or 0 if no value or if value is BLKmode. */
1526 rtx valreg;
1527 /* Address where we should return a BLKmode value;
1528 0 if value not BLKmode. */
1529 rtx structure_value_addr = 0;
1530 /* Nonzero if that address is being passed by treating it as
1531 an extra, implicit first parameter. Otherwise,
1532 it is passed by being copied directly into struct_value_rtx. */
1533 int structure_value_addr_parm = 0;
1534 /* Size of aggregate value wanted, or zero if none wanted
1535 or if we are using the non-reentrant PCC calling convention
1536 or expecting the value in registers. */
1537 HOST_WIDE_INT struct_value_size = 0;
1538 /* Nonzero if called function returns an aggregate in memory PCC style,
1539 by returning the address of where to find it. */
1540 int pcc_struct_value = 0;
1542 /* Number of actual parameters in this call, including struct value addr. */
1543 int num_actuals;
1544 /* Number of named args. Args after this are anonymous ones
1545 and they must all go on the stack. */
1546 int n_named_args;
1548 /* Vector of information about each argument.
1549 Arguments are numbered in the order they will be pushed,
1550 not the order they are written. */
1551 struct arg_data *args;
1553 /* Total size in bytes of all the stack-parms scanned so far. */
1554 struct args_size args_size;
1555 /* Size of arguments before any adjustments (such as rounding). */
1556 int unadjusted_args_size;
1557 /* Data on reg parms scanned so far. */
1558 CUMULATIVE_ARGS args_so_far;
1559 /* Nonzero if a reg parm has been scanned. */
1560 int reg_parm_seen;
1561 /* Nonzero if this is an indirect function call. */
1563 /* Nonzero if we must avoid push-insns in the args for this call.
1564 If stack space is allocated for register parameters, but not by the
1565 caller, then it is preallocated in the fixed part of the stack frame.
1566 So the entire argument block must then be preallocated (i.e., we
1567 ignore PUSH_ROUNDING in that case). */
1569 #ifdef PUSH_ROUNDING
1570 int must_preallocate = 0;
1571 #else
1572 int must_preallocate = 1;
1573 #endif
1575 /* Size of the stack reserved for parameter registers. */
1576 int reg_parm_stack_space = 0;
1578 /* Address of space preallocated for stack parms
1579 (on machines that lack push insns), or 0 if space not preallocated. */
1580 rtx argblock = 0;
1582 /* Nonzero if it is plausible that this is a call to alloca. */
1583 int may_be_alloca;
1584 /* Nonzero if this is a call to malloc or a related function. */
1585 int is_malloc;
1586 /* Nonzero if this is a call to setjmp or a related function. */
1587 int returns_twice;
1588 /* Nonzero if this is a call to `longjmp'. */
1589 int is_longjmp;
1590 /* Nonzero if this is a call to an inline function. */
1591 int is_integrable = 0;
1592 /* Nonzero if this is a call to a `const' function.
1593 Note that only explicitly named functions are handled as `const' here. */
1594 int is_const = 0;
1595 /* Nonzero if this is a call to a `volatile' function. */
1596 int is_volatile = 0;
1597 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1598 /* Define the boundary of the register parm stack space that needs to be
1599 saved, if any. */
1600 int low_to_save = -1, high_to_save;
1601 rtx save_area = 0; /* Place that it is saved */
1602 #endif
1604 #ifdef ACCUMULATE_OUTGOING_ARGS
1605 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1606 char *initial_stack_usage_map = stack_usage_map;
1607 int old_stack_arg_under_construction;
1608 #endif
1610 rtx old_stack_level = 0;
1611 int old_pending_adj = 0;
1612 int old_inhibit_defer_pop = inhibit_defer_pop;
1613 rtx call_fusage = 0;
1614 register tree p;
1615 register int i;
1617 /* The value of the function call can be put in a hard register. But
1618 if -fcheck-memory-usage, code which invokes functions (and thus
1619 clobbers some hard registers) can be inserted before using the value.
1620 So, target is always a pseudo-register in that case. */
1621 if (current_function_check_memory_usage)
1622 target = 0;
1624 /* See if we can find a DECL-node for the actual function.
1625 As a result, decide whether this is a call to an integrable function. */
1627 p = TREE_OPERAND (exp, 0);
1628 if (TREE_CODE (p) == ADDR_EXPR)
1630 fndecl = TREE_OPERAND (p, 0);
1631 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1632 fndecl = 0;
1633 else
1635 if (!flag_no_inline
1636 && fndecl != current_function_decl
1637 && DECL_INLINE (fndecl)
1638 && DECL_SAVED_INSNS (fndecl)
1639 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
1640 is_integrable = 1;
1641 else if (! TREE_ADDRESSABLE (fndecl))
1643 /* In case this function later becomes inlinable,
1644 record that there was already a non-inline call to it.
1646 Use abstraction instead of setting TREE_ADDRESSABLE
1647 directly. */
1648 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1649 && optimize > 0)
1651 warning_with_decl (fndecl, "can't inline call to `%s'");
1652 warning ("called from here");
1654 mark_addressable (fndecl);
1657 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1658 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1659 is_const = 1;
1661 if (TREE_THIS_VOLATILE (fndecl))
1662 is_volatile = 1;
1666 /* If we don't have a specific function to call, see if we have a
1667 constant or `noreturn' function from the type. */
1668 if (fndecl == 0)
1670 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1671 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1674 #ifdef REG_PARM_STACK_SPACE
1675 #ifdef MAYBE_REG_PARM_STACK_SPACE
1676 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1677 #else
1678 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1679 #endif
1680 #endif
1682 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1683 if (reg_parm_stack_space > 0)
1684 must_preallocate = 1;
1685 #endif
1687 /* Warn if this value is an aggregate type,
1688 regardless of which calling convention we are using for it. */
1689 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1690 warning ("function call has aggregate value");
1692 /* Set up a place to return a structure. */
1694 /* Cater to broken compilers. */
1695 if (aggregate_value_p (exp))
1697 /* This call returns a big structure. */
1698 is_const = 0;
1700 #ifdef PCC_STATIC_STRUCT_RETURN
1702 pcc_struct_value = 1;
1703 /* Easier than making that case work right. */
1704 if (is_integrable)
1706 /* In case this is a static function, note that it has been
1707 used. */
1708 if (! TREE_ADDRESSABLE (fndecl))
1709 mark_addressable (fndecl);
1710 is_integrable = 0;
1713 #else /* not PCC_STATIC_STRUCT_RETURN */
1715 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1717 if (target && GET_CODE (target) == MEM)
1718 structure_value_addr = XEXP (target, 0);
1719 else
1721 /* Assign a temporary to hold the value. */
1722 tree d;
1724 /* For variable-sized objects, we must be called with a target
1725 specified. If we were to allocate space on the stack here,
1726 we would have no way of knowing when to free it. */
1728 if (struct_value_size < 0)
1729 abort ();
1731 /* This DECL is just something to feed to mark_addressable;
1732 it doesn't get pushed. */
1733 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1734 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1735 mark_addressable (d);
1736 structure_value_addr = XEXP (DECL_RTL (d), 0);
1737 TREE_USED (d) = 1;
1738 target = 0;
1741 #endif /* not PCC_STATIC_STRUCT_RETURN */
1744 /* If called function is inline, try to integrate it. */
1746 if (is_integrable)
1748 rtx temp;
1749 #ifdef ACCUMULATE_OUTGOING_ARGS
1750 rtx before_call = get_last_insn ();
1751 #endif
1753 temp = expand_inline_function (fndecl, actparms, target,
1754 ignore, TREE_TYPE (exp),
1755 structure_value_addr);
1757 /* If inlining succeeded, return. */
1758 if (temp != (rtx) (HOST_WIDE_INT) -1)
1760 #ifdef ACCUMULATE_OUTGOING_ARGS
1761 /* If the outgoing argument list must be preserved, push
1762 the stack before executing the inlined function if it
1763 makes any calls. */
1765 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1766 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1767 break;
1769 if (stack_arg_under_construction || i >= 0)
1771 rtx first_insn
1772 = before_call ? NEXT_INSN (before_call) : get_insns ();
1773 rtx insn, seq;
1775 /* Look for a call in the inline function code.
1776 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
1777 nonzero then there is a call and it is not necessary
1778 to scan the insns. */
1780 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
1781 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1782 if (GET_CODE (insn) == CALL_INSN)
1783 break;
1785 if (insn)
1787 /* Reserve enough stack space so that the largest
1788 argument list of any function call in the inline
1789 function does not overlap the argument list being
1790 evaluated. This is usually an overestimate because
1791 allocate_dynamic_stack_space reserves space for an
1792 outgoing argument list in addition to the requested
1793 space, but there is no way to ask for stack space such
1794 that an argument list of a certain length can be
1795 safely constructed.
1797 Add the stack space reserved for register arguments, if
1798 any, in the inline function. What is really needed is the
1799 largest value of reg_parm_stack_space in the inline
1800 function, but that is not available. Using the current
1801 value of reg_parm_stack_space is wrong, but gives
1802 correct results on all supported machines. */
1804 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
1805 + reg_parm_stack_space);
1807 start_sequence ();
1808 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1809 allocate_dynamic_stack_space (GEN_INT (adjust),
1810 NULL_RTX, BITS_PER_UNIT);
1811 seq = get_insns ();
1812 end_sequence ();
1813 emit_insns_before (seq, first_insn);
1814 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1817 #endif
1819 /* If the result is equivalent to TARGET, return TARGET to simplify
1820 checks in store_expr. They can be equivalent but not equal in the
1821 case of a function that returns BLKmode. */
1822 if (temp != target && rtx_equal_p (temp, target))
1823 return target;
1824 return temp;
1827 /* If inlining failed, mark FNDECL as needing to be compiled
1828 separately after all. If function was declared inline,
1829 give a warning. */
1830 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1831 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1833 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1834 warning ("called from here");
1836 mark_addressable (fndecl);
1839 function_call_count++;
1841 if (fndecl && DECL_NAME (fndecl))
1842 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1844 /* See if this is a call to a function that can return more than once
1845 or a call to longjmp or malloc. */
1846 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1847 &is_malloc, &may_be_alloca);
1849 if (may_be_alloca)
1850 current_function_calls_alloca = 1;
1852 /* Operand 0 is a pointer-to-function; get the type of the function. */
1853 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1854 if (! POINTER_TYPE_P (funtype))
1855 abort ();
1856 funtype = TREE_TYPE (funtype);
1858 /* When calling a const function, we must pop the stack args right away,
1859 so that the pop is deleted or moved with the call. */
1860 if (is_const)
1861 NO_DEFER_POP;
1863 /* Don't let pending stack adjusts add up to too much.
1864 Also, do all pending adjustments now
1865 if there is any chance this might be a call to alloca. */
1867 if (pending_stack_adjust >= 32
1868 || (pending_stack_adjust > 0 && may_be_alloca))
1869 do_pending_stack_adjust ();
1871 /* Push the temporary stack slot level so that we can free any temporaries
1872 we make. */
1873 push_temp_slots ();
1875 /* Start updating where the next arg would go.
1877 On some machines (such as the PA) indirect calls have a different
1878 calling convention than normal calls. The last argument in
1879 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1880 or not. */
1881 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
1883 /* If struct_value_rtx is 0, it means pass the address
1884 as if it were an extra parameter. */
1885 if (structure_value_addr && struct_value_rtx == 0)
1887 /* If structure_value_addr is a REG other than
1888 virtual_outgoing_args_rtx, we can always use it. If it
1889 is not a REG, we must always copy it into a register.
1890 If it is virtual_outgoing_args_rtx, we must copy it to another
1891 register in some cases. */
1892 rtx temp = (GET_CODE (structure_value_addr) != REG
1893 #ifdef ACCUMULATE_OUTGOING_ARGS
1894 || (stack_arg_under_construction
1895 && structure_value_addr == virtual_outgoing_args_rtx)
1896 #endif
1897 ? copy_addr_to_reg (structure_value_addr)
1898 : structure_value_addr);
1900 actparms
1901 = tree_cons (error_mark_node,
1902 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1903 temp),
1904 actparms);
1905 structure_value_addr_parm = 1;
1908 /* Count the arguments and set NUM_ACTUALS. */
1909 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1910 num_actuals = i;
1912 /* Compute number of named args.
1913 Normally, don't include the last named arg if anonymous args follow.
1914 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1915 (If no anonymous args follow, the result of list_length is actually
1916 one too large. This is harmless.)
1918 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1919 zero, this machine will be able to place unnamed args that were passed in
1920 registers into the stack. So treat all args as named. This allows the
1921 insns emitted for a specific argument list to be independent of the
1922 function declaration.
1924 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1925 way to pass unnamed args in registers, so we must force them into
1926 memory. */
1928 if ((STRICT_ARGUMENT_NAMING
1929 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1930 && TYPE_ARG_TYPES (funtype) != 0)
1931 n_named_args
1932 = (list_length (TYPE_ARG_TYPES (funtype))
1933 /* Don't include the last named arg. */
1934 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1935 /* Count the struct value address, if it is passed as a parm. */
1936 + structure_value_addr_parm);
1937 else
1938 /* If we know nothing, treat all args as named. */
1939 n_named_args = num_actuals;
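/* Illustrative example of the computation above, assuming
   !STRICT_ARGUMENT_NAMING and no structure value parm: for a call to
   `int f (int a, int b)' the TYPE_ARG_TYPES list is (int, int, void),
   so list_length returns 3 ("one too large") and n_named_args is
   3 - 1 = 2, as expected.  For `int f (int a, int b, ...)' the list has
   no void terminator, so n_named_args is 2 - 1 = 1; the last named arg
   before the anonymous ones is not treated as named.  */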
1941 /* Make a vector to hold all the information about each arg. */
1942 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1943 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1945 /* Build up entries in the ARGS array, compute the size of the arguments
1946 into ARGS_SIZE, etc. */
1947 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1948 actparms, fndecl, &args_so_far,
1949 reg_parm_stack_space, &old_stack_level,
1950 &old_pending_adj, &must_preallocate,
1951 &is_const);
1953 #ifdef FINAL_REG_PARM_STACK_SPACE
1954 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1955 args_size.var);
1956 #endif
1958 if (args_size.var)
1960 /* If this function requires a variable-sized argument list, don't try to
1961 make a cse'able block for this call. We may be able to do this
1962 eventually, but it is too complicated to keep track of what insns go
1963 in the cse'able block and which don't. */
1965 is_const = 0;
1966 must_preallocate = 1;
1969 /* Compute the actual size of the argument block required. The variable
1970 and constant sizes must be combined, the size may have to be rounded,
1971 and there may be a minimum required size. */
1972 unadjusted_args_size
1973 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1975 /* Now make final decision about preallocating stack space. */
1976 must_preallocate = finalize_must_preallocate (must_preallocate,
1977 num_actuals, args, &args_size);
1979 /* If the structure value address will reference the stack pointer, we must
1980 stabilize it. We don't need to do this if we know that we are not going
1981 to adjust the stack pointer in processing this call. */
1983 if (structure_value_addr
1984 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1985 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1986 && (args_size.var
1987 #ifndef ACCUMULATE_OUTGOING_ARGS
1988 || args_size.constant
1989 #endif
1991 structure_value_addr = copy_to_reg (structure_value_addr);
1993 /* Precompute any arguments as needed. */
1994 precompute_arguments (is_const, must_preallocate, num_actuals,
1995 args, &args_size);
1997 /* Now we are about to start emitting insns that can be deleted
1998 if a libcall is deleted. */
1999 if (is_const || is_malloc)
2000 start_sequence ();
2002 /* If we have no actual push instructions, or shouldn't use them,
2003 make space for all args right now. */
2005 if (args_size.var != 0)
2007 if (old_stack_level == 0)
2009 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2010 old_pending_adj = pending_stack_adjust;
2011 pending_stack_adjust = 0;
2012 #ifdef ACCUMULATE_OUTGOING_ARGS
2013 /* stack_arg_under_construction says whether a stack arg is
2014 being constructed at the old stack level. Pushing the stack
2015 gets a clean outgoing argument block. */
2016 old_stack_arg_under_construction = stack_arg_under_construction;
2017 stack_arg_under_construction = 0;
2018 #endif
2020 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2022 else
2024 /* Note that we must go through the motions of allocating an argument
2025 block even if the size is zero because we may be storing args
2026 in the area reserved for register arguments, which may be part of
2027 the stack frame. */
2029 int needed = args_size.constant;
2031 /* Store the maximum argument space used. It will be pushed by
2032 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2033 checking). */
2035 if (needed > current_function_outgoing_args_size)
2036 current_function_outgoing_args_size = needed;
2038 if (must_preallocate)
2040 #ifdef ACCUMULATE_OUTGOING_ARGS
2041 /* Since the stack pointer will never be pushed, it is possible for
2042 the evaluation of a parm to clobber something we have already
2043 written to the stack. Since most function calls on RISC machines
2044 do not use the stack, this is uncommon, but must work correctly.
2046 Therefore, we save any area of the stack that was already written
2047 and that we are using. Here we set up to do this by making a new
2048 stack usage map from the old one. The actual save will be done
2049 by store_one_arg.
2051 Another approach might be to try to reorder the argument
2052 evaluations to avoid this conflicting stack usage. */
2054 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2055 /* Since we will be writing into the entire argument area, the
2056 map must be allocated for its entire size, not just the part that
2057 is the responsibility of the caller. */
2058 needed += reg_parm_stack_space;
2059 #endif
2061 #ifdef ARGS_GROW_DOWNWARD
2062 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2063 needed + 1);
2064 #else
2065 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2066 needed);
2067 #endif
2068 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2070 if (initial_highest_arg_in_use)
2071 bcopy (initial_stack_usage_map, stack_usage_map,
2072 initial_highest_arg_in_use);
2074 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2075 bzero (&stack_usage_map[initial_highest_arg_in_use],
2076 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2077 needed = 0;
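/* For illustration (sizes made up): if an enclosing call's argument
   setup had already written the first 16 bytes of the outgoing argument
   area, stack_usage_map[0..15] would be nonzero here and store_one_arg
   would save that region before this call reuses it.  */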
2079 /* The address of the outgoing argument list must not be copied to a
2080 register here, because argblock would be left pointing to the
2081 wrong place after the call to allocate_dynamic_stack_space below. */
2084 argblock = virtual_outgoing_args_rtx;
2086 #else /* not ACCUMULATE_OUTGOING_ARGS */
2087 if (inhibit_defer_pop == 0)
2089 /* Try to reuse some or all of the pending_stack_adjust
2090 to get this space. Maybe we can avoid any pushing. */
2091 if (needed > pending_stack_adjust)
2093 needed -= pending_stack_adjust;
2094 pending_stack_adjust = 0;
2096 else
2098 pending_stack_adjust -= needed;
2099 needed = 0;
2102 /* Special case this because overhead of `push_block' in this
2103 case is non-trivial. */
2104 if (needed == 0)
2105 argblock = virtual_outgoing_args_rtx;
2106 else
2107 argblock = push_block (GEN_INT (needed), 0, 0);
2109 /* We only really need to call `copy_to_reg' in the case where push
2110 insns are going to be used to pass ARGBLOCK to a function
2111 call in ARGS. In that case, the stack pointer changes value
2112 from the allocation point to the call point, and hence
2113 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2114 But might as well always do it. */
2115 argblock = copy_to_reg (argblock);
2116 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2120 #ifdef ACCUMULATE_OUTGOING_ARGS
2121 /* The save/restore code in store_one_arg handles all cases except one:
2122 a constructor call (including a C function returning a BLKmode struct)
2123 to initialize an argument. */
2124 if (stack_arg_under_construction)
2126 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2127 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2128 #else
2129 rtx push_size = GEN_INT (args_size.constant);
2130 #endif
2131 if (old_stack_level == 0)
2133 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2134 old_pending_adj = pending_stack_adjust;
2135 pending_stack_adjust = 0;
2136 /* stack_arg_under_construction says whether a stack arg is
2137 being constructed at the old stack level. Pushing the stack
2138 gets a clean outgoing argument block. */
2139 old_stack_arg_under_construction = stack_arg_under_construction;
2140 stack_arg_under_construction = 0;
2141 /* Make a new map for the new argument list. */
2142 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2143 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2144 highest_outgoing_arg_in_use = 0;
2146 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2148 /* If argument evaluation might modify the stack pointer, copy the
2149 address of the argument list to a register. */
2150 for (i = 0; i < num_actuals; i++)
2151 if (args[i].pass_on_stack)
2153 argblock = copy_addr_to_reg (argblock);
2154 break;
2156 #endif
2158 compute_argument_addresses (args, argblock, num_actuals);
2160 #ifdef PUSH_ARGS_REVERSED
2161 #ifdef PREFERRED_STACK_BOUNDARY
2162 /* If we push args individually in reverse order, perform stack alignment
2163 before the first push (the last arg). */
2164 if (argblock == 0)
2165 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2166 #endif
2167 #endif
2169 /* Don't try to defer pops if preallocating, not even from the first arg,
2170 since ARGBLOCK probably refers to the SP. */
2171 if (argblock)
2172 NO_DEFER_POP;
2174 funexp = rtx_for_function_call (fndecl, exp);
2176 /* Figure out the register where the value, if any, will come back. */
2177 valreg = 0;
2178 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2179 && ! structure_value_addr)
2181 if (pcc_struct_value)
2182 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2183 fndecl);
2184 else
2185 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2188 /* Precompute all register parameters. It isn't safe to compute anything
2189 once we have started filling any specific hard regs. */
2190 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2192 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2194 /* Save the fixed argument area if it's part of the caller's frame and
2195 is clobbered by argument setup for this call. */
2196 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2197 &low_to_save, &high_to_save);
2198 #endif
2201 /* Now store (and compute if necessary) all non-register parms.
2202 These come before register parms, since they can require block-moves,
2203 which could clobber the registers used for register parms.
2204 Parms which have partial registers are not stored here,
2205 but we do preallocate space here if they want that. */
2207 for (i = 0; i < num_actuals; i++)
2208 if (args[i].reg == 0 || args[i].pass_on_stack)
2209 store_one_arg (&args[i], argblock, may_be_alloca,
2210 args_size.var != 0, reg_parm_stack_space);
2212 /* If we have a parm that is passed in registers but not in memory
2213 and whose alignment does not permit a direct copy into registers,
2214 make a group of pseudos that correspond to each register that we
2215 will later fill. */
2216 if (STRICT_ALIGNMENT)
2217 store_unaligned_arguments_into_pseudos (args, num_actuals);
2219 /* Now store any partially-in-registers parm.
2220 This is the last place a block-move can happen. */
2221 if (reg_parm_seen)
2222 for (i = 0; i < num_actuals; i++)
2223 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2224 store_one_arg (&args[i], argblock, may_be_alloca,
2225 args_size.var != 0, reg_parm_stack_space);
2227 #ifndef PUSH_ARGS_REVERSED
2228 #ifdef PREFERRED_STACK_BOUNDARY
2229 /* If we pushed args in forward order, perform stack alignment
2230 after pushing the last arg. */
2231 if (argblock == 0)
2232 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2233 #endif
2234 #endif
2236 /* If register arguments require space on the stack and stack space
2237 was not preallocated, allocate stack space here for arguments
2238 passed in registers. */
2239 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2240 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2241 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2242 #endif
2244 /* Pass the function the address in which to return a structure value. */
2245 if (structure_value_addr && ! structure_value_addr_parm)
2247 emit_move_insn (struct_value_rtx,
2248 force_reg (Pmode,
2249 force_operand (structure_value_addr,
2250 NULL_RTX)));
2252 /* Mark the memory for the aggregate as write-only. */
2253 if (current_function_check_memory_usage)
2254 emit_library_call (chkr_set_right_libfunc, 1,
2255 VOIDmode, 3,
2256 structure_value_addr, ptr_mode,
2257 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2258 GEN_INT (MEMORY_USE_WO),
2259 TYPE_MODE (integer_type_node));
2261 if (GET_CODE (struct_value_rtx) == REG)
2262 use_reg (&call_fusage, struct_value_rtx);
2265 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2267 load_register_parameters (args, num_actuals, &call_fusage);
2269 /* Perform postincrements before actually calling the function. */
2270 emit_queue ();
2272 /* All arguments and registers used for the call must be set up by now! */
2274 /* Generate the actual call instruction. */
2275 emit_call_1 (funexp, fndecl, funtype, args_size.constant, struct_value_size,
2276 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2277 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2279 /* If call is cse'able, make appropriate pair of reg-notes around it.
2280 Test valreg so we don't crash; may safely ignore `const'
2281 if return type is void. Disable for PARALLEL return values, because
2282 we have no way to move such values into a pseudo register. */
2283 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2285 rtx note = 0;
2286 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2287 rtx insns;
2289 /* Mark the return value as a pointer if needed. */
2290 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2292 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2293 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2296 /* Construct an "equal form" for the value which mentions all the
2297 arguments in order as well as the function name. */
2298 #ifdef PUSH_ARGS_REVERSED
2299 for (i = 0; i < num_actuals; i++)
2300 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2301 #else
2302 for (i = num_actuals - 1; i >= 0; i--)
2303 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2304 #endif
2305 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2307 insns = get_insns ();
2308 end_sequence ();
2310 emit_libcall_block (insns, temp, valreg, note);
2312 valreg = temp;
2314 else if (is_const)
2316 /* Otherwise, just write out the sequence without a note. */
2317 rtx insns = get_insns ();
2319 end_sequence ();
2320 emit_insns (insns);
2322 else if (is_malloc)
2324 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2325 rtx last, insns;
2327 /* The return value from a malloc-like function is a pointer. */
2328 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2329 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2331 emit_move_insn (temp, valreg);
2333 /* The return value from a malloc-like function cannot alias
2334 anything else. */
2335 last = get_last_insn ();
2336 REG_NOTES (last) =
2337 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2339 /* Write out the sequence. */
2340 insns = get_insns ();
2341 end_sequence ();
2342 emit_insns (insns);
2343 valreg = temp;
2346 /* For calls to `setjmp', etc., inform flow.c it should complain
2347 if nonvolatile values are live. */
2349 if (returns_twice)
2351 emit_note (name, NOTE_INSN_SETJMP);
2352 current_function_calls_setjmp = 1;
2355 if (is_longjmp)
2356 current_function_calls_longjmp = 1;
2358 /* Notice functions that cannot return.
2359 If optimizing, insns emitted below will be dead.
2360 If not optimizing, they will exist, which is useful
2361 if the user uses the `return' command in the debugger. */
2363 if (is_volatile || is_longjmp)
2364 emit_barrier ();
2366 /* If value type not void, return an rtx for the value. */
2368 /* If there are cleanups to be called, don't use a hard reg as target.
2369 We need to double check this and see if it matters anymore. */
2370 if (any_pending_cleanups (1)
2371 && target && REG_P (target)
2372 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2373 target = 0;
2375 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2376 || ignore)
2378 target = const0_rtx;
2380 else if (structure_value_addr)
2382 if (target == 0 || GET_CODE (target) != MEM)
2384 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2385 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2386 structure_value_addr));
2387 MEM_SET_IN_STRUCT_P (target,
2388 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2391 else if (pcc_struct_value)
2393 /* This is the special C++ case where we need to
2394 know what the true target was. We take care to
2395 never use this value more than once in one expression. */
2396 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2397 copy_to_reg (valreg));
2398 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2400 /* Handle calls that return values in multiple non-contiguous locations.
2401 The Irix 6 ABI has examples of this. */
2402 else if (GET_CODE (valreg) == PARALLEL)
2404 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2406 if (target == 0)
2408 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2409 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2410 preserve_temp_slots (target);
2413 emit_group_store (target, valreg, bytes,
2414 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2416 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2417 && GET_MODE (target) == GET_MODE (valreg))
2418 /* TARGET and VALREG cannot be equal at this point because the latter
2419 would not have REG_FUNCTION_VALUE_P true, while the former would if
2420 it were referring to the same register.
2422 If they refer to the same register, this move will be a no-op, except
2423 when function inlining is being done. */
2424 emit_move_insn (target, valreg);
2425 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2426 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2427 else
2428 target = copy_to_reg (valreg);
2430 #ifdef PROMOTE_FUNCTION_RETURN
2431 /* If we promoted this return value, make the proper SUBREG. TARGET
2432 might be const0_rtx here, so be careful. */
2433 if (GET_CODE (target) == REG
2434 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2435 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2437 tree type = TREE_TYPE (exp);
2438 int unsignedp = TREE_UNSIGNED (type);
2440 /* If we don't promote as expected, something is wrong. */
2441 if (GET_MODE (target)
2442 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2443 abort ();
2445 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2446 SUBREG_PROMOTED_VAR_P (target) = 1;
2447 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2449 #endif
2451 /* If size of args is variable or this was a constructor call for a stack
2452 argument, restore saved stack-pointer value. */
2454 if (old_stack_level)
2456 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2457 pending_stack_adjust = old_pending_adj;
2458 #ifdef ACCUMULATE_OUTGOING_ARGS
2459 stack_arg_under_construction = old_stack_arg_under_construction;
2460 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2461 stack_usage_map = initial_stack_usage_map;
2462 #endif
2464 #ifdef ACCUMULATE_OUTGOING_ARGS
2465 else
2467 #ifdef REG_PARM_STACK_SPACE
2468 if (save_area)
2469 restore_fixed_argument_area (save_area, argblock,
2470 high_to_save, low_to_save);
2471 #endif
2473 /* If we saved any argument areas, restore them. */
2474 for (i = 0; i < num_actuals; i++)
2475 if (args[i].save_area)
2477 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2478 rtx stack_area
2479 = gen_rtx_MEM (save_mode,
2480 memory_address (save_mode,
2481 XEXP (args[i].stack_slot, 0)));
2483 if (save_mode != BLKmode)
2484 emit_move_insn (stack_area, args[i].save_area);
2485 else
2486 emit_block_move (stack_area, validize_mem (args[i].save_area),
2487 GEN_INT (args[i].size.constant),
2488 PARM_BOUNDARY / BITS_PER_UNIT);
2491 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2492 stack_usage_map = initial_stack_usage_map;
2494 #endif
2496 /* If this was alloca, record the new stack level for nonlocal gotos.
2497 Check for the handler slots since we might not have a save area
2498 for non-local gotos. */
2500 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2501 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2503 pop_temp_slots ();
2505 /* Free up storage we no longer need. */
2506 for (i = 0; i < num_actuals; ++i)
2507 if (args[i].aligned_regs)
2508 free (args[i].aligned_regs);
2510 return target;
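/* The varargs interface of emit_library_call below is easiest to see
   from a concrete use.  A minimal sketch, with the hypothetical names
   __example, op0 and op1 (the chkr_set_right_libfunc call in
   expand_call above is a real instance of the same pattern):

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example"),
			1, VOIDmode, 2,
			op0, SImode, op1, SImode);

   i.e. the function rtx, the NO_QUEUE flag, OUTMODE, NARGS, and then
   NARGS pairs of (rtx value, machine mode).  */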
2513 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2514 (emitting the queue unless NO_QUEUE is nonzero),
2515 for a value of mode OUTMODE,
2516 with NARGS different arguments, passed as alternating rtx values
2517 and machine_modes to convert them to.
2518 The rtx values should have been passed through protect_from_queue already.
2520 NO_QUEUE will be true if and only if the library call is a `const' call
2521 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2522 to the variable is_const in expand_call.
2524 NO_QUEUE must be true for const calls, because if it isn't, then
2525 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2526 and will be lost if the libcall sequence is optimized away.
2528 NO_QUEUE must be false for non-const calls, because if it isn't, the
2529 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2530 optimized. For instance, the instruction scheduler may incorrectly
2531 move memory references across the non-const call. */
2533 void
2534 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2535 int nargs, ...))
2537 #ifndef ANSI_PROTOTYPES
2538 rtx orgfun;
2539 int no_queue;
2540 enum machine_mode outmode;
2541 int nargs;
2542 #endif
2543 va_list p;
2544 /* Total size in bytes of all the stack-parms scanned so far. */
2545 struct args_size args_size;
2546 /* Size of arguments before any adjustments (such as rounding). */
2547 struct args_size original_args_size;
2548 register int argnum;
2549 rtx fun;
2550 int inc;
2551 int count;
2552 rtx argblock = 0;
2553 CUMULATIVE_ARGS args_so_far;
2554 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2555 struct args_size offset; struct args_size size; rtx save_area; };
2556 struct arg *argvec;
2557 int old_inhibit_defer_pop = inhibit_defer_pop;
2558 rtx call_fusage = 0;
2559 int reg_parm_stack_space = 0;
2560 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2561 /* Define the boundary of the register parm stack space that needs to be
2562 saved, if any. */
2563 int low_to_save = -1, high_to_save;
2564 rtx save_area = 0; /* Place that it is saved */
2565 #endif
2567 #ifdef ACCUMULATE_OUTGOING_ARGS
2568 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2569 char *initial_stack_usage_map = stack_usage_map;
2570 int needed;
2571 #endif
2573 #ifdef REG_PARM_STACK_SPACE
2574 /* Size of the stack reserved for parameter registers. */
2575 #ifdef MAYBE_REG_PARM_STACK_SPACE
2576 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2577 #else
2578 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2579 #endif
2580 #endif
2582 VA_START (p, nargs);
2584 #ifndef ANSI_PROTOTYPES
2585 orgfun = va_arg (p, rtx);
2586 no_queue = va_arg (p, int);
2587 outmode = va_arg (p, enum machine_mode);
2588 nargs = va_arg (p, int);
2589 #endif
2591 fun = orgfun;
2593 /* Copy all the libcall-arguments out of the varargs data
2594 and into a vector ARGVEC.
2596 Compute how to pass each argument. We only support a very small subset
2597 of the full argument passing conventions to limit complexity here since
2598 library functions shouldn't have many args. */
2600 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2601 bzero ((char *) argvec, nargs * sizeof (struct arg));
2604 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2606 args_size.constant = 0;
2607 args_size.var = 0;
2609 push_temp_slots ();
2611 for (count = 0; count < nargs; count++)
2613 rtx val = va_arg (p, rtx);
2614 enum machine_mode mode = va_arg (p, enum machine_mode);
2616 /* We cannot convert the arg value to the mode the library wants here;
2617 must do it earlier where we know the signedness of the arg. */
2618 if (mode == BLKmode
2619 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2620 abort ();
2622 /* On some machines, there's no way to pass a float to a library fcn.
2623 Pass it as a double instead. */
2624 #ifdef LIBGCC_NEEDS_DOUBLE
2625 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2626 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2627 #endif
2629 /* There's no need to call protect_from_queue, because
2630 either emit_move_insn or emit_push_insn will do that. */
2632 /* Make sure it is a reasonable operand for a move or push insn. */
2633 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2634 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2635 val = force_operand (val, NULL_RTX);
2637 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2638 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2640 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2641 be viewed as just an efficiency improvement. */
2642 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2643 emit_move_insn (slot, val);
2644 val = force_operand (XEXP (slot, 0), NULL_RTX);
2645 mode = Pmode;
2647 #endif
2649 argvec[count].value = val;
2650 argvec[count].mode = mode;
2652 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2653 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2654 abort ();
2655 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2656 argvec[count].partial
2657 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2658 #else
2659 argvec[count].partial = 0;
2660 #endif
2662 locate_and_pad_parm (mode, NULL_TREE,
2663 argvec[count].reg && argvec[count].partial == 0,
2664 NULL_TREE, &args_size, &argvec[count].offset,
2665 &argvec[count].size);
2667 if (argvec[count].size.var)
2668 abort ();
2670 if (reg_parm_stack_space == 0 && argvec[count].partial)
2671 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2673 if (argvec[count].reg == 0 || argvec[count].partial != 0
2674 || reg_parm_stack_space > 0)
2675 args_size.constant += argvec[count].size.constant;
2677 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2679 va_end (p);
2681 #ifdef FINAL_REG_PARM_STACK_SPACE
2682 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2683 args_size.var);
2684 #endif
2686 /* If this machine requires an external definition for library
2687 functions, write one out. */
2688 assemble_external_libcall (fun);
2690 original_args_size = args_size;
2691 #ifdef PREFERRED_STACK_BOUNDARY
2692 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2693 / STACK_BYTES) * STACK_BYTES);
2694 #endif
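/* Illustrative arithmetic: with STACK_BYTES == 8 an unrounded
   args_size.constant of 20 becomes ((20 + 7) / 8) * 8 == 24.  */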
2696 args_size.constant = MAX (args_size.constant,
2697 reg_parm_stack_space);
2699 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2700 args_size.constant -= reg_parm_stack_space;
2701 #endif
2703 if (args_size.constant > current_function_outgoing_args_size)
2704 current_function_outgoing_args_size = args_size.constant;
2706 #ifdef ACCUMULATE_OUTGOING_ARGS
2707 /* Since the stack pointer will never be pushed, it is possible for
2708 the evaluation of a parm to clobber something we have already
2709 written to the stack. Since most function calls on RISC machines
2710 do not use the stack, this is uncommon, but must work correctly.
2712 Therefore, we save any area of the stack that was already written
2713 and that we are using. Here we set up to do this by making a new
2714 stack usage map from the old one.
2716 Another approach might be to try to reorder the argument
2717 evaluations to avoid this conflicting stack usage. */
2719 needed = args_size.constant;
2721 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2722 /* Since we will be writing into the entire argument area, the
2723 map must be allocated for its entire size, not just the part that
2724 is the responsibility of the caller. */
2725 needed += reg_parm_stack_space;
2726 #endif
2728 #ifdef ARGS_GROW_DOWNWARD
2729 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2730 needed + 1);
2731 #else
2732 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2733 needed);
2734 #endif
2735 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2737 if (initial_highest_arg_in_use)
2738 bcopy (initial_stack_usage_map, stack_usage_map,
2739 initial_highest_arg_in_use);
2741 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2742 bzero (&stack_usage_map[initial_highest_arg_in_use],
2743 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2744 needed = 0;
2746 /* The address of the outgoing argument list must not be copied to a
2747 register here, because argblock would be left pointing to the
2748 wrong place after the call to allocate_dynamic_stack_space below. */
2751 argblock = virtual_outgoing_args_rtx;
2752 #else /* not ACCUMULATE_OUTGOING_ARGS */
2753 #ifndef PUSH_ROUNDING
2754 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2755 #endif
2756 #endif
2758 #ifdef PUSH_ARGS_REVERSED
2759 #ifdef PREFERRED_STACK_BOUNDARY
2760 /* If we push args individually in reverse order, perform stack alignment
2761 before the first push (the last arg). */
2762 if (argblock == 0)
2763 anti_adjust_stack (GEN_INT (args_size.constant
2764 - original_args_size.constant));
2765 #endif
2766 #endif
2768 #ifdef PUSH_ARGS_REVERSED
2769 inc = -1;
2770 argnum = nargs - 1;
2771 #else
2772 inc = 1;
2773 argnum = 0;
2774 #endif
2776 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2777 /* The argument list is the property of the called routine and it
2778 may clobber it. If the fixed area has been used for previous
2779 parameters, we must save and restore it.
2781 Here we compute the boundary of the area that needs to be saved, if any. */
2783 #ifdef ARGS_GROW_DOWNWARD
2784 for (count = 0; count < reg_parm_stack_space + 1; count++)
2785 #else
2786 for (count = 0; count < reg_parm_stack_space; count++)
2787 #endif
2789 if (count >= highest_outgoing_arg_in_use
2790 || stack_usage_map[count] == 0)
2791 continue;
2793 if (low_to_save == -1)
2794 low_to_save = count;
2796 high_to_save = count;
2799 if (low_to_save >= 0)
2801 int num_to_save = high_to_save - low_to_save + 1;
2802 enum machine_mode save_mode
2803 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2804 rtx stack_area;
2806 /* If we don't have the required alignment, must do this in BLKmode. */
2807 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2808 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2809 save_mode = BLKmode;
2811 #ifdef ARGS_GROW_DOWNWARD
2812 stack_area = gen_rtx_MEM (save_mode,
2813 memory_address (save_mode,
2814 plus_constant (argblock,
2815 - high_to_save)));
2816 #else
2817 stack_area = gen_rtx_MEM (save_mode,
2818 memory_address (save_mode,
2819 plus_constant (argblock,
2820 low_to_save)));
2821 #endif
2822 if (save_mode == BLKmode)
2824 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2825 emit_block_move (validize_mem (save_area), stack_area,
2826 GEN_INT (num_to_save),
2827 PARM_BOUNDARY / BITS_PER_UNIT);
2829 else
2831 save_area = gen_reg_rtx (save_mode);
2832 emit_move_insn (save_area, stack_area);
2835 #endif
2837 /* Push the args that need to be pushed. */
2839 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2840 are to be pushed. */
2841 for (count = 0; count < nargs; count++, argnum += inc)
2843 register enum machine_mode mode = argvec[argnum].mode;
2844 register rtx val = argvec[argnum].value;
2845 rtx reg = argvec[argnum].reg;
2846 int partial = argvec[argnum].partial;
2847 #ifdef ACCUMULATE_OUTGOING_ARGS
2848 int lower_bound, upper_bound, i;
2849 #endif
2851 if (! (reg != 0 && partial == 0))
2853 #ifdef ACCUMULATE_OUTGOING_ARGS
2854 /* If this is being stored into a pre-allocated, fixed-size, stack
2855 area, save any previous data at that location. */
2857 #ifdef ARGS_GROW_DOWNWARD
2858 /* stack_slot is negative, but we want to index stack_usage_map
2859 with positive values. */
2860 upper_bound = -argvec[argnum].offset.constant + 1;
2861 lower_bound = upper_bound - argvec[argnum].size.constant;
2862 #else
2863 lower_bound = argvec[argnum].offset.constant;
2864 upper_bound = lower_bound + argvec[argnum].size.constant;
2865 #endif
2867 for (i = lower_bound; i < upper_bound; i++)
2868 if (stack_usage_map[i]
2869 /* Don't store things in the fixed argument area at this point;
2870 it has already been saved. */
2871 && i > reg_parm_stack_space)
2872 break;
2874 if (i != upper_bound)
2876 /* We need to make a save area. See what mode we can make it. */
2877 enum machine_mode save_mode
2878 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2879 MODE_INT, 1);
2880 rtx stack_area
2881 = gen_rtx_MEM (save_mode,
2882 memory_address (save_mode,
2883 plus_constant (argblock, argvec[argnum].offset.constant)));
2884 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2885 emit_move_insn (argvec[argnum].save_area, stack_area);
2887 #endif
2888 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2889 argblock, GEN_INT (argvec[argnum].offset.constant),
2890 reg_parm_stack_space);
2892 #ifdef ACCUMULATE_OUTGOING_ARGS
2893 /* Now mark the segment we just used. */
2894 for (i = lower_bound; i < upper_bound; i++)
2895 stack_usage_map[i] = 1;
2896 #endif
2898 NO_DEFER_POP;
2902 #ifndef PUSH_ARGS_REVERSED
2903 #ifdef PREFERRED_STACK_BOUNDARY
2904 /* If we pushed args in forward order, perform stack alignment
2905 after pushing the last arg. */
2906 if (argblock == 0)
2907 anti_adjust_stack (GEN_INT (args_size.constant
2908 - original_args_size.constant));
2909 #endif
2910 #endif
2912 #ifdef PUSH_ARGS_REVERSED
2913 argnum = nargs - 1;
2914 #else
2915 argnum = 0;
2916 #endif
2918 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2920 /* Now load any reg parms into their regs. */
2922 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2923 are to be pushed. */
2924 for (count = 0; count < nargs; count++, argnum += inc)
2926 register rtx val = argvec[argnum].value;
2927 rtx reg = argvec[argnum].reg;
2928 int partial = argvec[argnum].partial;
2930 if (reg != 0 && partial == 0)
2931 emit_move_insn (reg, val);
2932 NO_DEFER_POP;
2935 /* For version 1.37, try deleting this entirely. */
2936 if (! no_queue)
2937 emit_queue ();
2939 /* Any regs containing parms remain in use through the call. */
2940 for (count = 0; count < nargs; count++)
2941 if (argvec[count].reg != 0)
2942 use_reg (&call_fusage, argvec[count].reg);
2944 /* Don't allow popping to be deferred, since then
2945 cse'ing of library calls could delete a call and leave the pop. */
2946 NO_DEFER_POP;
2948 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2949 will set inhibit_defer_pop to that value. */
2951 /* The return type is needed to decide how many bytes the function pops.
2952 Signedness plays no role in that, so for simplicity, we pretend it's
2953 always signed. We also assume that the list of arguments passed has
2954 no impact, so we pretend it is unknown. */
2956 emit_call_1 (fun,
2957 get_identifier (XSTR (orgfun, 0)),
2958 build_function_type (outmode == VOIDmode ? void_type_node
2959 : type_for_mode (outmode, 0), NULL_TREE),
2960 args_size.constant, 0,
2961 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2962 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2963 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2965 pop_temp_slots ();
2967 /* Now restore inhibit_defer_pop to its actual original value. */
2968 OK_DEFER_POP;
2970 #ifdef ACCUMULATE_OUTGOING_ARGS
2971 #ifdef REG_PARM_STACK_SPACE
2972 if (save_area)
2974 enum machine_mode save_mode = GET_MODE (save_area);
2975 #ifdef ARGS_GROW_DOWNWARD
2976 rtx stack_area
2977 = gen_rtx_MEM (save_mode,
2978 memory_address (save_mode,
2979 plus_constant (argblock,
2980 - high_to_save)));
2981 #else
2982 rtx stack_area
2983 = gen_rtx_MEM (save_mode,
2984 memory_address (save_mode,
2985 plus_constant (argblock, low_to_save)));
2986 #endif
2988 if (save_mode != BLKmode)
2989 emit_move_insn (stack_area, save_area);
2990 else
2991 emit_block_move (stack_area, validize_mem (save_area),
2992 GEN_INT (high_to_save - low_to_save + 1),
2993 PARM_BOUNDARY / BITS_PER_UNIT);
2995 #endif
2997 /* If we saved any argument areas, restore them. */
2998 for (count = 0; count < nargs; count++)
2999 if (argvec[count].save_area)
3001 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3002 rtx stack_area
3003 = gen_rtx_MEM (save_mode,
3004 memory_address (save_mode,
3005 plus_constant (argblock, argvec[count].offset.constant)));
3007 emit_move_insn (stack_area, argvec[count].save_area);
3010 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3011 stack_usage_map = initial_stack_usage_map;
3012 #endif
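/* As above, a hypothetical use of the value-returning variant defined
   below (libfunc, op0 and op1 are made-up names):

     rtx result = emit_library_call_value (libfunc, NULL_RTX, 1, SImode, 2,
					   op0, SImode, op1, SImode);

   Passing NULL_RTX for VALUE lets the function pick where the result
   goes; the rtx it returns says where the value can be found.  */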
3015 /* Like emit_library_call except that an extra argument, VALUE,
3016 comes second and says where to store the result.
3017 (If VALUE is zero, this function chooses a convenient way
3018 to return the value.)
3020 This function returns an rtx for where the value is to be found.
3021 If VALUE is nonzero, VALUE is returned. */
3024 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3025 enum machine_mode outmode, int nargs, ...))
3027 #ifndef ANSI_PROTOTYPES
3028 rtx orgfun;
3029 rtx value;
3030 int no_queue;
3031 enum machine_mode outmode;
3032 int nargs;
3033 #endif
3034 va_list p;
3035 /* Total size in bytes of all the stack-parms scanned so far. */
3036 struct args_size args_size;
3037 /* Size of arguments before any adjustments (such as rounding). */
3038 struct args_size original_args_size;
3039 register int argnum;
3040 rtx fun;
3041 int inc;
3042 int count;
3043 rtx argblock = 0;
3044 CUMULATIVE_ARGS args_so_far;
3045 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3046 struct args_size offset; struct args_size size; rtx save_area; };
3047 struct arg *argvec;
3048 int old_inhibit_defer_pop = inhibit_defer_pop;
3049 rtx call_fusage = 0;
3050 rtx mem_value = 0;
3051 int pcc_struct_value = 0;
3052 int struct_value_size = 0;
3053 int is_const;
3054 int reg_parm_stack_space = 0;
3055 #ifdef ACCUMULATE_OUTGOING_ARGS
3056 int needed;
3057 #endif
3059 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3060 /* Define the boundary of the register parm stack space that needs to be
3061 saved, if any. */
3062 int low_to_save = -1, high_to_save;
3063 rtx save_area = 0; /* Place that it is saved */
3064 #endif
3066 #ifdef ACCUMULATE_OUTGOING_ARGS
3067 /* Size of the stack reserved for parameter registers. */
3068 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3069 char *initial_stack_usage_map = stack_usage_map;
3070 #endif
3072 #ifdef REG_PARM_STACK_SPACE
3073 #ifdef MAYBE_REG_PARM_STACK_SPACE
3074 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3075 #else
3076 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3077 #endif
3078 #endif
3080 VA_START (p, nargs);
3082 #ifndef ANSI_PROTOTYPES
3083 orgfun = va_arg (p, rtx);
3084 value = va_arg (p, rtx);
3085 no_queue = va_arg (p, int);
3086 outmode = va_arg (p, enum machine_mode);
3087 nargs = va_arg (p, int);
3088 #endif
3090 is_const = no_queue;
3091 fun = orgfun;
3093 /* If this kind of value comes back in memory,
3094 decide where in memory it should come back. */
3095 if (aggregate_value_p (type_for_mode (outmode, 0)))
3097 #ifdef PCC_STATIC_STRUCT_RETURN
3098 rtx pointer_reg
3099 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3101 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3102 pcc_struct_value = 1;
3103 if (value == 0)
3104 value = gen_reg_rtx (outmode);
3105 #else /* not PCC_STATIC_STRUCT_RETURN */
3106 struct_value_size = GET_MODE_SIZE (outmode);
3107 if (value != 0 && GET_CODE (value) == MEM)
3108 mem_value = value;
3109 else
3110 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3111 #endif
3113 /* This call returns a big structure. */
3114 is_const = 0;
3117 /* ??? Unfinished: must pass the memory address as an argument. */
3119 /* Copy all the libcall-arguments out of the varargs data
3120 and into a vector ARGVEC.
3122 Compute how to pass each argument. We only support a very small subset
3123 of the full argument passing conventions to limit complexity here since
3124 library functions shouldn't have many args. */
3126 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3127 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3129 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3131 args_size.constant = 0;
3132 args_size.var = 0;
3134 count = 0;
3136 push_temp_slots ();
3138 /* If there's a structure value address to be passed,
3139 either pass it in the special place, or pass it as an extra argument. */
3140 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3142 rtx addr = XEXP (mem_value, 0);
3143 nargs++;
3145 /* Make sure it is a reasonable operand for a move or push insn. */
3146 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3147 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3148 addr = force_operand (addr, NULL_RTX);
3150 argvec[count].value = addr;
3151 argvec[count].mode = Pmode;
3152 argvec[count].partial = 0;
3154 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3155 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3156 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3157 abort ();
3158 #endif
3160 locate_and_pad_parm (Pmode, NULL_TREE,
3161 argvec[count].reg && argvec[count].partial == 0,
3162 NULL_TREE, &args_size, &argvec[count].offset,
3163 &argvec[count].size);
3166 if (argvec[count].reg == 0 || argvec[count].partial != 0
3167 || reg_parm_stack_space > 0)
3168 args_size.constant += argvec[count].size.constant;
3170 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3172 count++;
3175 for (; count < nargs; count++)
3177 rtx val = va_arg (p, rtx);
3178 enum machine_mode mode = va_arg (p, enum machine_mode);
3180 /* We cannot convert the arg value to the mode the library wants here;
3181 must do it earlier where we know the signedness of the arg. */
3182 if (mode == BLKmode
3183 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3184 abort ();
3186 /* On some machines, there's no way to pass a float to a library fcn.
3187 Pass it as a double instead. */
3188 #ifdef LIBGCC_NEEDS_DOUBLE
3189 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3190 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3191 #endif
3193 /* There's no need to call protect_from_queue, because
3194 either emit_move_insn or emit_push_insn will do that. */
3196 /* Make sure it is a reasonable operand for a move or push insn. */
3197 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3198 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3199 val = force_operand (val, NULL_RTX);
3201 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3202 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3204 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3205 be viewed as just an efficiency improvement. */
3206 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3207 emit_move_insn (slot, val);
3208 val = XEXP (slot, 0);
3209 mode = Pmode;
3211 #endif
3213 argvec[count].value = val;
3214 argvec[count].mode = mode;
3216 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3217 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3218 abort ();
3219 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3220 argvec[count].partial
3221 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3222 #else
3223 argvec[count].partial = 0;
3224 #endif
3226 locate_and_pad_parm (mode, NULL_TREE,
3227 argvec[count].reg && argvec[count].partial == 0,
3228 NULL_TREE, &args_size, &argvec[count].offset,
3229 &argvec[count].size);
3231 if (argvec[count].size.var)
3232 abort ();
3234 if (reg_parm_stack_space == 0 && argvec[count].partial)
3235 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3237 if (argvec[count].reg == 0 || argvec[count].partial != 0
3238 || reg_parm_stack_space > 0)
3239 args_size.constant += argvec[count].size.constant;
3241 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3243 va_end (p);
3245 #ifdef FINAL_REG_PARM_STACK_SPACE
3246 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3247 args_size.var);
3248 #endif
3249 /* If this machine requires an external definition for library
3250 functions, write one out. */
3251 assemble_external_libcall (fun);
3253 original_args_size = args_size;
3254 #ifdef PREFERRED_STACK_BOUNDARY
3255 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3256 / STACK_BYTES) * STACK_BYTES);
3257 #endif
3259 args_size.constant = MAX (args_size.constant,
3260 reg_parm_stack_space);
3262 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3263 args_size.constant -= reg_parm_stack_space;
3264 #endif
3266 if (args_size.constant > current_function_outgoing_args_size)
3267 current_function_outgoing_args_size = args_size.constant;
3269 #ifdef ACCUMULATE_OUTGOING_ARGS
3270 /* Since the stack pointer will never be pushed, it is possible for
3271 the evaluation of a parm to clobber something we have already
3272 written to the stack. Since most function calls on RISC machines
3273 do not use the stack, this is uncommon, but must work correctly.
3275 Therefore, we save any area of the stack that was already written
3276 and that we are using. Here we set up to do this by making a new
3277 stack usage map from the old one.
3279 Another approach might be to try to reorder the argument
3280 evaluations to avoid this conflicting stack usage. */
3282 needed = args_size.constant;
3284 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3285 /* Since we will be writing into the entire argument area, the
3286 map must be allocated for its entire size, not just the part that
3287 is the responsibility of the caller. */
3288 needed += reg_parm_stack_space;
3289 #endif
3291 #ifdef ARGS_GROW_DOWNWARD
3292 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3293 needed + 1);
3294 #else
3295 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3296 needed);
3297 #endif
3298 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3300 if (initial_highest_arg_in_use)
3301 bcopy (initial_stack_usage_map, stack_usage_map,
3302 initial_highest_arg_in_use);
3304 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3305 bzero (&stack_usage_map[initial_highest_arg_in_use],
3306 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3307 needed = 0;
3309 /* The address of the outgoing argument list must not be copied to a
3310 register here, because argblock would be left pointing to the
3311 wrong place after the call to allocate_dynamic_stack_space below. */
3314 argblock = virtual_outgoing_args_rtx;
3315 #else /* not ACCUMULATE_OUTGOING_ARGS */
3316 #ifndef PUSH_ROUNDING
3317 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3318 #endif
3319 #endif
3321 #ifdef PUSH_ARGS_REVERSED
3322 #ifdef PREFERRED_STACK_BOUNDARY
3323 /* If we push args individually in reverse order, perform stack alignment
3324 before the first push (the last arg). */
3325 if (argblock == 0)
3326 anti_adjust_stack (GEN_INT (args_size.constant
3327 - original_args_size.constant));
3328 #endif
3329 #endif
3331 #ifdef PUSH_ARGS_REVERSED
3332 inc = -1;
3333 argnum = nargs - 1;
3334 #else
3335 inc = 1;
3336 argnum = 0;
3337 #endif
3339 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3340 /* The argument list is the property of the called routine and it
3341 may clobber it. If the fixed area has been used for previous
3342 parameters, we must save and restore it.
3344 Here we compute the boundary of the area that needs to be saved, if any. */
3346 #ifdef ARGS_GROW_DOWNWARD
3347 for (count = 0; count < reg_parm_stack_space + 1; count++)
3348 #else
3349 for (count = 0; count < reg_parm_stack_space; count++)
3350 #endif
3352 if (count >= highest_outgoing_arg_in_use
3353 || stack_usage_map[count] == 0)
3354 continue;
3356 if (low_to_save == -1)
3357 low_to_save = count;
3359 high_to_save = count;
3362 if (low_to_save >= 0)
3364 int num_to_save = high_to_save - low_to_save + 1;
3365 enum machine_mode save_mode
3366 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3367 rtx stack_area;
3369 /* If we don't have the required alignment, must do this in BLKmode. */
3370 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3371 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3372 save_mode = BLKmode;
3374 #ifdef ARGS_GROW_DOWNWARD
3375 stack_area = gen_rtx_MEM (save_mode,
3376 memory_address (save_mode,
3377 plus_constant (argblock,
3378 - high_to_save)));
3379 #else
3380 stack_area = gen_rtx_MEM (save_mode,
3381 memory_address (save_mode,
3382 plus_constant (argblock,
3383 low_to_save)));
3384 #endif
3385 if (save_mode == BLKmode)
3387 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3388 emit_block_move (validize_mem (save_area), stack_area,
3389 GEN_INT (num_to_save),
3390 PARM_BOUNDARY / BITS_PER_UNIT);
3392 else
3394 save_area = gen_reg_rtx (save_mode);
3395 emit_move_insn (save_area, stack_area);
3398 #endif
3400 /* Push the args that need to be pushed. */
3402 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3403 are to be pushed. */
3404 for (count = 0; count < nargs; count++, argnum += inc)
3406 register enum machine_mode mode = argvec[argnum].mode;
3407 register rtx val = argvec[argnum].value;
3408 rtx reg = argvec[argnum].reg;
3409 int partial = argvec[argnum].partial;
3410 #ifdef ACCUMULATE_OUTGOING_ARGS
3411 int lower_bound, upper_bound, i;
3412 #endif
3414 if (! (reg != 0 && partial == 0))
3416 #ifdef ACCUMULATE_OUTGOING_ARGS
3417 /* If this is being stored into a pre-allocated, fixed-size, stack
3418 area, save any previous data at that location. */
3420 #ifdef ARGS_GROW_DOWNWARD
3421 /* stack_slot is negative, but we want to index stack_usage_map
3422 with positive values. */
3423 upper_bound = -argvec[argnum].offset.constant + 1;
3424 lower_bound = upper_bound - argvec[argnum].size.constant;
3425 #else
3426 lower_bound = argvec[argnum].offset.constant;
3427 upper_bound = lower_bound + argvec[argnum].size.constant;
3428 #endif
3430 for (i = lower_bound; i < upper_bound; i++)
3431 if (stack_usage_map[i]
3432 /* Don't store things in the fixed argument area at this point;
3433 it has already been saved. */
3434 && i > reg_parm_stack_space)
3435 break;
3437 if (i != upper_bound)
3439 /* We need to make a save area. See what mode we can make it. */
3440 enum machine_mode save_mode
3441 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3442 MODE_INT, 1);
3443 rtx stack_area
3444 = gen_rtx_MEM (save_mode,
3445 memory_address (save_mode,
3446 plus_constant (argblock,
3447 argvec[argnum].offset.constant)));
3448 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3449 emit_move_insn (argvec[argnum].save_area, stack_area);
3451 #endif
3452 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3453 argblock, GEN_INT (argvec[argnum].offset.constant),
3454 reg_parm_stack_space);
3456 #ifdef ACCUMULATE_OUTGOING_ARGS
3457 /* Now mark the segment we just used. */
3458 for (i = lower_bound; i < upper_bound; i++)
3459 stack_usage_map[i] = 1;
3460 #endif
3462 NO_DEFER_POP;
3466 #ifndef PUSH_ARGS_REVERSED
3467 #ifdef PREFERRED_STACK_BOUNDARY
3468 /* If we pushed args in forward order, perform stack alignment
3469 after pushing the last arg. */
3470 if (argblock == 0)
3471 anti_adjust_stack (GEN_INT (args_size.constant
3472 - original_args_size.constant));
3473 #endif
3474 #endif
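/* Illustrative note (not part of the original source, and assuming
   args_size.constant was rounded up to a multiple of STACK_BYTES earlier in
   this function): with STACK_BYTES == 16 and 20 bytes of argument data,
   args_size.constant is 32, so the adjustment above allocates the remaining
   12 bytes of padding after the final push.  */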
3476 #ifdef PUSH_ARGS_REVERSED
3477 argnum = nargs - 1;
3478 #else
3479 argnum = 0;
3480 #endif
3482 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3484 /* Now load any reg parms into their regs. */
3486 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3487 are to be pushed. */
3488 for (count = 0; count < nargs; count++, argnum += inc)
3490 register rtx val = argvec[argnum].value;
3491 rtx reg = argvec[argnum].reg;
3492 int partial = argvec[argnum].partial;
3494 if (reg != 0 && partial == 0)
3495 emit_move_insn (reg, val);
3496 NO_DEFER_POP;
3499 #if 0
3500 /* For version 1.37, try deleting this entirely. */
3501 if (! no_queue)
3502 emit_queue ();
3503 #endif
3505 /* Any regs containing parms remain in use through the call. */
3506 for (count = 0; count < nargs; count++)
3507 if (argvec[count].reg != 0)
3508 use_reg (&call_fusage, argvec[count].reg);
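/* Descriptive note (not part of the original source): use_reg records a USE
   of each parameter register in CALL_FUSAGE; emit_call_1 attaches that list
   to the call insn so later passes see these registers as used by the
   call.  */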
3510 /* Pass the function the address in which to return a structure value. */
3511 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3513 emit_move_insn (struct_value_rtx,
3514 force_reg (Pmode,
3515 force_operand (XEXP (mem_value, 0),
3516 NULL_RTX)));
3517 if (GET_CODE (struct_value_rtx) == REG)
3518 use_reg (&call_fusage, struct_value_rtx);
3521 /* Don't allow popping to be deferred, since then
3522 cse'ing of library calls could delete a call and leave the pop. */
3523 NO_DEFER_POP;
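/* Descriptive note (not part of the original source): NO_DEFER_POP
   increments inhibit_defer_pop; while it is nonzero, stack adjustments
   after calls are emitted immediately instead of being accumulated in
   pending_stack_adjust, so no pop can become separated from its call.
   OK_DEFER_POP below undoes the increment.  */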
3525 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3526 will set inhibit_defer_pop to that value. */
3527 /* See the comment in emit_library_call about the function type we build
3528 and pass here. */
3530 emit_call_1 (fun,
3531 get_identifier (XSTR (orgfun, 0)),
3532 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3533 args_size.constant, struct_value_size,
3534 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3535 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3536 old_inhibit_defer_pop + 1, call_fusage, is_const);
3538 /* Now restore inhibit_defer_pop to its actual original value. */
3539 OK_DEFER_POP;
3541 pop_temp_slots ();
3543 /* Copy the value to the right place. */
3544 if (outmode != VOIDmode)
3546 if (mem_value)
3548 if (value == 0)
3549 value = mem_value;
3550 if (value != mem_value)
3551 emit_move_insn (value, mem_value);
3553 else if (value != 0)
3554 emit_move_insn (value, hard_libcall_value (outmode));
3555 else
3556 value = hard_libcall_value (outmode);
3559 #ifdef ACCUMULATE_OUTGOING_ARGS
3560 #ifdef REG_PARM_STACK_SPACE
3561 if (save_area)
3563 enum machine_mode save_mode = GET_MODE (save_area);
3564 #ifdef ARGS_GROW_DOWNWARD
3565 rtx stack_area
3566 = gen_rtx_MEM (save_mode,
3567 memory_address (save_mode,
3568 plus_constant (argblock,
3569 - high_to_save)));
3570 #else
3571 rtx stack_area
3572 = gen_rtx_MEM (save_mode,
3573 memory_address (save_mode,
3574 plus_constant (argblock, low_to_save)));
3575 #endif
3576 if (save_mode != BLKmode)
3577 emit_move_insn (stack_area, save_area);
3578 else
3579 emit_block_move (stack_area, validize_mem (save_area),
3580 GEN_INT (high_to_save - low_to_save + 1),
3581 PARM_BOUNDARY / BITS_PER_UNIT);
3583 #endif
3585 /* If we saved any argument areas, restore them. */
3586 for (count = 0; count < nargs; count++)
3587 if (argvec[count].save_area)
3589 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3590 rtx stack_area
3591 = gen_rtx_MEM (save_mode,
3592 memory_address (save_mode, plus_constant (argblock,
3593 argvec[count].offset.constant)));
3595 emit_move_insn (stack_area, argvec[count].save_area);
3598 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3599 stack_usage_map = initial_stack_usage_map;
3600 #endif
3602 return value;
3605 #if 0
3606 /* Return an rtx which represents a suitable home on the stack
3607 given TYPE, the type of the argument looking for a home.
3608 This is called only for BLKmode arguments.
3610 SIZE is the size needed for this target.
3611 ARGS_ADDR is the address of the bottom of the argument block for this call.
3612 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3613 if this machine uses push insns. */
3615 static rtx
3616 target_for_arg (type, size, args_addr, offset)
3617 tree type;
3618 rtx size;
3619 rtx args_addr;
3620 struct args_size offset;
3622 rtx target;
3623 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3625 /* We avoid calling memory_address when possible,
3626 because we want to address as close to the stack
3627 as possible. For non-variable sized arguments,
3628 this will be stack-pointer relative addressing. */
3629 if (GET_CODE (offset_rtx) == CONST_INT)
3630 target = plus_constant (args_addr, INTVAL (offset_rtx));
3631 else
3633 /* I have no idea how to guarantee that this
3634 will work in the presence of register parameters. */
3635 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3636 target = memory_address (QImode, target);
3639 return gen_rtx_MEM (BLKmode, target);
3641 #endif
3643 /* Store a single argument for a function call
3644 into the register or memory area where it must be passed.
3645 *ARG describes the argument value and where to pass it.
3647 ARGBLOCK is the address of the stack-block for all the arguments,
3648 or 0 on a machine where arguments are pushed individually.
3650 MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
3651 so we must be careful about how the stack is used.
3653 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3654 argument stack.  This is used, if ACCUMULATE_OUTGOING_ARGS is defined, to indicate
3655 that we need not worry about saving and restoring the stack.
3657 FNDECL is the declaration of the function we are calling. */
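/* Summary note (not part of the original source): broadly, store_one_arg
   first saves and then marks, in stack_usage_map, any pre-allocated stack
   area this store will overwrite, expands the argument value if it has not
   been precomputed, and finally pushes it with emit_push_insn, after which
   further pops are inhibited via NO_DEFER_POP.  */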
3659 static void
3660 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3661 reg_parm_stack_space)
3662 struct arg_data *arg;
3663 rtx argblock;
3664 int may_be_alloca;
3665 int variable_size ATTRIBUTE_UNUSED;
3666 int reg_parm_stack_space;
3668 register tree pval = arg->tree_value;
3669 rtx reg = 0;
3670 int partial = 0;
3671 int used = 0;
3672 #ifdef ACCUMULATE_OUTGOING_ARGS
3673 int i, lower_bound, upper_bound;
3674 #endif
3676 if (TREE_CODE (pval) == ERROR_MARK)
3677 return;
3679 /* Push a new temporary level for any temporaries we make for
3680 this argument. */
3681 push_temp_slots ();
3683 #ifdef ACCUMULATE_OUTGOING_ARGS
3684 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3685 save any previous data at that location. */
3686 if (argblock && ! variable_size && arg->stack)
3688 #ifdef ARGS_GROW_DOWNWARD
3689 /* stack_slot is negative, but we want to index stack_usage_map
3690 with positive values. */
3691 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3692 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3693 else
3694 upper_bound = 0;
3696 lower_bound = upper_bound - arg->size.constant;
3697 #else
3698 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3699 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3700 else
3701 lower_bound = 0;
3703 upper_bound = lower_bound + arg->size.constant;
3704 #endif
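/* Illustrative note (not part of the original source): if the slot address
   is (plus argblock 24) and arg->size.constant == 8, the upward-growing
   case above gives lower_bound == 24 and upper_bound == 32, so
   stack_usage_map[24..31] is inspected below.  */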
3706 for (i = lower_bound; i < upper_bound; i++)
3707 if (stack_usage_map[i]
3708 /* Don't store things in the fixed argument area at this point;
3709 it has already been saved. */
3710 && i > reg_parm_stack_space)
3711 break;
3713 if (i != upper_bound)
3715 /* We need to make a save area. See what mode we can make it. */
3716 enum machine_mode save_mode
3717 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3718 rtx stack_area
3719 = gen_rtx_MEM (save_mode,
3720 memory_address (save_mode,
3721 XEXP (arg->stack_slot, 0)));
3723 if (save_mode == BLKmode)
3725 arg->save_area = assign_stack_temp (BLKmode,
3726 arg->size.constant, 0);
3727 MEM_SET_IN_STRUCT_P (arg->save_area,
3728 AGGREGATE_TYPE_P (TREE_TYPE
3729 (arg->tree_value)));
3730 preserve_temp_slots (arg->save_area);
3731 emit_block_move (validize_mem (arg->save_area), stack_area,
3732 GEN_INT (arg->size.constant),
3733 PARM_BOUNDARY / BITS_PER_UNIT);
3735 else
3737 arg->save_area = gen_reg_rtx (save_mode);
3738 emit_move_insn (arg->save_area, stack_area);
3743 /* Now that we have saved any slots that will be overwritten by this
3744 store, mark all slots this store will use. We must do this before
3745 we actually expand the argument since the expansion itself may
3746 trigger library calls which might need to use the same stack slot. */
3747 if (argblock && ! variable_size && arg->stack)
3748 for (i = lower_bound; i < upper_bound; i++)
3749 stack_usage_map[i] = 1;
3750 #endif
3752 /* If this isn't going to be placed on both the stack and in registers,
3753 set up the register and number of words. */
3754 if (! arg->pass_on_stack)
3755 reg = arg->reg, partial = arg->partial;
3757 if (reg != 0 && partial == 0)
3758 /* Being passed entirely in a register. We shouldn't be called in
3759 this case. */
3760 abort ();
3762 /* If this arg needs special alignment, don't load the registers
3763 here. */
3764 if (arg->n_aligned_regs != 0)
3765 reg = 0;
3767 /* If this is being passed partially in a register, we can't evaluate
3768 it directly into its stack slot. Otherwise, we can. */
3769 if (arg->value == 0)
3771 #ifdef ACCUMULATE_OUTGOING_ARGS
3772 /* stack_arg_under_construction is nonzero if a function argument is
3773 being evaluated directly into the outgoing argument list and
3774 expand_call must take special action to preserve the argument list
3775 if it is called recursively.
3777 For scalar function arguments stack_usage_map is sufficient to
3778 determine which stack slots must be saved and restored. Scalar
3779 arguments in general have pass_on_stack == 0.
3781 If this argument is initialized by a function which takes the
3782 address of the argument (a C++ constructor or a C function
3783 returning a BLKmode structure), then stack_usage_map is
3784 insufficient and expand_call must push the stack around the
3785 function call. Such arguments have pass_on_stack == 1.
3787 Note that it is always safe to set stack_arg_under_construction,
3788 but this generates suboptimal code if set when not needed. */
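/* Illustrative scenario (not part of the original source): if this argument
   is built by calling a C function that returns a BLKmode structure
   directly into the argument's stack slot, the increment below tells the
   nested expand_call to push the stack around that inner call, as
   described above.  */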
3790 if (arg->pass_on_stack)
3791 stack_arg_under_construction++;
3792 #endif
3793 arg->value = expand_expr (pval,
3794 (partial
3795 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3796 ? NULL_RTX : arg->stack,
3797 VOIDmode, 0);
3799 /* If the mode doesn't agree (because we are promoting the object,
3800 or for any other reason), convert the mode. */
3802 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3803 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3804 arg->value, arg->unsignedp);
3806 #ifdef ACCUMULATE_OUTGOING_ARGS
3807 if (arg->pass_on_stack)
3808 stack_arg_under_construction--;
3809 #endif
3812 /* Don't allow anything left on stack from computation
3813 of argument to alloca. */
3814 if (may_be_alloca)
3815 do_pending_stack_adjust ();
3817 if (arg->value == arg->stack)
3819 /* If the value is already in the stack slot, we are done moving
3820 data. */
3821 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3823 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3824 XEXP (arg->stack, 0), ptr_mode,
3825 ARGS_SIZE_RTX (arg->size),
3826 TYPE_MODE (sizetype),
3827 GEN_INT (MEMORY_USE_RW),
3828 TYPE_MODE (integer_type_node));
3831 else if (arg->mode != BLKmode)
3833 register int size;
3835 /* Argument is a scalar, not entirely passed in registers.
3836 (If part is passed in registers, arg->partial says how much
3837 and emit_push_insn will take care of putting it there.)
3839 Push it, and if its size is less than the
3840 amount of space allocated to it,
3841 also bump stack pointer by the additional space.
3842 Note that in C the default argument promotions
3843 will prevent such mismatches. */
3845 size = GET_MODE_SIZE (arg->mode);
3846 /* Compute how much space the push instruction will push.
3847 On many machines, pushing a byte will advance the stack
3848 pointer by a halfword. */
3849 #ifdef PUSH_ROUNDING
3850 size = PUSH_ROUNDING (size);
3851 #endif
3852 used = size;
3854 /* Compute how much space the argument should get:
3855 round up to a multiple of the alignment for arguments. */
3856 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3857 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3858 / (PARM_BOUNDARY / BITS_PER_UNIT))
3859 * (PARM_BOUNDARY / BITS_PER_UNIT));
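/* Illustrative arithmetic (not part of the original source): when
   FUNCTION_ARG_PADDING is not `none', a 1-byte argument (after any
   PUSH_ROUNDING) with PARM_BOUNDARY == 32 gives
   used == ((1 + 3) / 4) * 4 == 4, so USED - SIZE == 3 bytes of padding are
   passed to emit_push_insn below as extra space to allocate.  */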
3861 /* This isn't already where we want it on the stack, so put it there.
3862 This can either be done with push or copy insns. */
3863 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3864 partial, reg, used - size, argblock,
3865 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3867 else
3869 /* BLKmode, at least partly to be pushed. */
3871 register int excess;
3872 rtx size_rtx;
3874 /* Pushing a nonscalar.
3875 If part is passed in registers, PARTIAL says how much
3876 and emit_push_insn will take care of putting it there. */
3878 /* Round its size up to a multiple
3879 of the allocation unit for arguments. */
3881 if (arg->size.var != 0)
3883 excess = 0;
3884 size_rtx = ARGS_SIZE_RTX (arg->size);
3886 else
3888 /* PUSH_ROUNDING has no effect on us, because
3889 emit_push_insn for BLKmode is careful to avoid it. */
3890 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3891 + partial * UNITS_PER_WORD);
3892 size_rtx = expr_size (pval);
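/* Illustrative arithmetic (not part of the original source): for a 6-byte
   structure whose stack slot was padded to size.constant == 8, with
   partial == 0 the computation above gives excess == 2, which
   emit_push_insn is told to allocate as extra space beyond the value
   itself.  */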
3895 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3896 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3897 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3898 reg_parm_stack_space);
3902 /* Unless this is a partially-in-register argument, the argument is now
3903 in the stack.
3905 ??? Note that this can change arg->value from arg->stack to
3906 arg->stack_slot and it matters when they are not the same.
3907 It isn't totally clear that this is correct in all cases. */
3908 if (partial == 0)
3909 arg->value = arg->stack_slot;
3911 /* Once we have pushed something, pops can't safely
3912 be deferred during the rest of the arguments. */
3913 NO_DEFER_POP;
3915 /* ANSI doesn't require a sequence point here,
3916 but PCC has one, so this will avoid some problems. */
3917 emit_queue ();
3919 /* Free any temporary slots made in processing this argument. Show
3920 that we might have taken the address of something and pushed that
3921 as an operand. */
3922 preserve_temp_slots (NULL_RTX);
3923 free_temp_slots ();
3924 pop_temp_slots ();