1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 92-97, 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tree.h"
25 #include "flags.h"
26 #include "expr.h"
27 #include "regs.h"
28 #include "insn-flags.h"
29 #include "toplev.h"
30 #include "output.h"
32 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
33 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
34 #endif
36 /* Decide whether a function's arguments should be processed
37 from first to last or from last to first.
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
42 #ifdef PUSH_ROUNDING
44 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
45 #define PUSH_ARGS_REVERSED /* If it's last to first */
46 #endif
48 #endif
50 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
51 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
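/* Illustrative example (values are hypothetical, not from any particular
   port): with PREFERRED_STACK_BOUNDARY == 64 and BITS_PER_UNIT == 8,
   STACK_BYTES is 8, so argument block sizes below are rounded up to
   multiples of 8 bytes.  */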
53 /* Data structure and subroutines used within expand_call. */
55 struct arg_data
57 /* Tree node for this argument. */
58 tree tree_value;
59 /* Mode for value; TYPE_MODE unless promoted. */
60 enum machine_mode mode;
61 /* Current RTL value for argument, or 0 if it isn't precomputed. */
62 rtx value;
64 /* Initially-computed RTL value for argument; only for const functions. */
64 rtx initial_value;
65 /* Register to pass this argument in, 0 if passed on stack, or a
66 PARALLEL if the arg is to be copied into multiple non-contiguous
67 registers. */
68 rtx reg;
69 /* If REG was promoted from the actual mode of the argument expression,
70 indicates whether the promotion is sign- or zero-extended. */
71 int unsignedp;
72 /* Number of registers to use. 0 means put the whole arg in registers.
73 Also 0 if not passed in registers. */
74 int partial;
75 /* Non-zero if argument must be passed on stack.
76 Note that some arguments may be passed on the stack
77 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
78 pass_on_stack identifies arguments that *cannot* go in registers. */
79 int pass_on_stack;
80 /* Offset of this argument from beginning of stack-args. */
81 struct args_size offset;
82 /* Similar, but offset to the start of the stack slot. Different from
83 OFFSET if this arg pads downward. */
84 struct args_size slot_offset;
85 /* Size of this argument on the stack, rounded up for any padding it gets;
86 parts of the argument passed in registers do not count.
87 If REG_PARM_STACK_SPACE is defined, then register parms
88 are counted here as well. */
89 struct args_size size;
90 /* Location on the stack at which parameter should be stored. The store
91 has already been done if STACK == VALUE. */
92 rtx stack;
93 /* Location on the stack of the start of this argument slot. This can
94 differ from STACK if this arg pads downward. This location is known
95 to be aligned to FUNCTION_ARG_BOUNDARY. */
96 rtx stack_slot;
97 #ifdef ACCUMULATE_OUTGOING_ARGS
98 /* Place that this stack area has been saved, if needed. */
99 rtx save_area;
100 #endif
101 /* If an argument's alignment does not permit direct copying into registers,
102 copy it in smaller-sized pieces into pseudos. These are stored in a
103 block pointed to by this field. The next field says how many
104 word-sized pseudos we made. */
105 rtx *aligned_regs;
106 int n_aligned_regs;
109 #ifdef ACCUMULATE_OUTGOING_ARGS
110 /* A vector of one char per byte of stack space. A byte is non-zero if
111 the corresponding stack location has been used.
112 This vector is used to prevent a function call within an argument from
113 clobbering any stack already set up. */
114 static char *stack_usage_map;
116 /* Size of STACK_USAGE_MAP. */
117 static int highest_outgoing_arg_in_use;
119 /* stack_arg_under_construction is nonzero when an argument may be
120 initialized with a constructor call (including a C function that
121 returns a BLKmode struct) and expand_call must take special action
122 to make sure the object being constructed does not overlap the
123 argument list for the constructor call. */
124 int stack_arg_under_construction;
125 #endif
127 static int calls_function PROTO ((tree, int));
128 static int calls_function_1 PROTO ((tree, int));
129 static void emit_call_1 PROTO ((rtx, tree, tree, HOST_WIDE_INT,
130 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
131 rtx, int, rtx, int));
132 static void special_function_p PROTO ((char *, tree, int *, int *,
133 int *, int *));
134 static void precompute_register_parameters PROTO ((int, struct arg_data *,
135 int *));
136 static void store_one_arg PROTO ((struct arg_data *, rtx, int, int,
137 int));
138 static void store_unaligned_arguments_into_pseudos PROTO ((struct arg_data *,
139 int));
140 static int finalize_must_preallocate PROTO ((int, int,
141 struct arg_data *,
142 struct args_size *));
143 static void precompute_arguments PROTO ((int, int, int,
144 struct arg_data *,
145 struct args_size *));
146 static int compute_argument_block_size PROTO ((int,
147 struct args_size *));
148 static void initialize_argument_information PROTO ((int,
149 struct arg_data *,
150 struct args_size *,
151 int, tree, tree,
152 CUMULATIVE_ARGS *,
153 int, rtx *, int *,
154 int *, int *));
155 static void compute_argument_addresses PROTO ((struct arg_data *,
156 rtx, int));
157 static rtx rtx_for_function_call PROTO ((tree, tree));
158 static void load_register_parameters PROTO ((struct arg_data *,
159 int, rtx *));
161 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
162 static rtx save_fixed_argument_area PROTO ((int, rtx, int *, int *));
163 static void restore_fixed_argument_area PROTO ((rtx, rtx, int, int));
164 #endif
166 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
167 `alloca'.
169 If WHICH is 0, return 1 if EXP contains a call to any function.
170 Actually, we only need to return 1 if evaluating EXP would require pushing
171 arguments on the stack, but that is too difficult to compute, so we just
172 assume any function call might require the stack. */
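/* As a usage sketch: precompute_arguments below calls
   calls_function (args[i].tree_value, 1) to ask whether evaluating an
   argument might call alloca, and calls_function (args[i].tree_value, 0)
   to ask whether it might call any function at all.  */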
174 static tree calls_function_save_exprs;
176 static int
177 calls_function (exp, which)
178 tree exp;
179 int which;
181 int val;
182 calls_function_save_exprs = 0;
183 val = calls_function_1 (exp, which);
184 calls_function_save_exprs = 0;
185 return val;
188 static int
189 calls_function_1 (exp, which)
190 tree exp;
191 int which;
193 register int i;
194 enum tree_code code = TREE_CODE (exp);
195 int type = TREE_CODE_CLASS (code);
196 int length = tree_code_length[(int) code];
198 /* If this code is language-specific, we don't know what it will do. */
199 if ((int) code >= NUM_TREE_CODES)
200 return 1;
202 /* Only expressions and references can contain calls. */
203 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r'
204 && type != 'b')
205 return 0;
207 switch (code)
209 case CALL_EXPR:
210 if (which == 0)
211 return 1;
212 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
213 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
214 == FUNCTION_DECL))
216 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
218 if ((DECL_BUILT_IN (fndecl)
219 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
220 || (DECL_SAVED_INSNS (fndecl)
221 && (FUNCTION_FLAGS (DECL_SAVED_INSNS (fndecl))
222 & FUNCTION_FLAGS_CALLS_ALLOCA)))
223 return 1;
226 /* Third operand is RTL. */
227 length = 2;
228 break;
230 case SAVE_EXPR:
231 if (SAVE_EXPR_RTL (exp) != 0)
232 return 0;
233 if (value_member (exp, calls_function_save_exprs))
234 return 0;
235 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
236 calls_function_save_exprs);
237 return (TREE_OPERAND (exp, 0) != 0
238 && calls_function_1 (TREE_OPERAND (exp, 0), which));
240 case BLOCK:
242 register tree local;
244 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
245 if (DECL_INITIAL (local) != 0
246 && calls_function_1 (DECL_INITIAL (local), which))
247 return 1;
250 register tree subblock;
252 for (subblock = BLOCK_SUBBLOCKS (exp);
253 subblock;
254 subblock = TREE_CHAIN (subblock))
255 if (calls_function_1 (subblock, which))
256 return 1;
258 return 0;
260 case METHOD_CALL_EXPR:
261 length = 3;
262 break;
264 case WITH_CLEANUP_EXPR:
265 length = 1;
266 break;
268 case RTL_EXPR:
269 return 0;
271 default:
272 break;
275 for (i = 0; i < length; i++)
276 if (TREE_OPERAND (exp, i) != 0
277 && calls_function_1 (TREE_OPERAND (exp, i), which))
278 return 1;
280 return 0;
283 /* Force FUNEXP into a form suitable for the address of a CALL,
284 and return that as an rtx. Also load the static chain register
285 if FNDECL is a nested function.
287 CALL_FUSAGE points to a variable holding the prospective
288 CALL_INSN_FUNCTION_USAGE information. */
291 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen)
292 rtx funexp;
293 tree fndecl;
294 rtx *call_fusage;
295 int reg_parm_seen;
297 rtx static_chain_value = 0;
299 funexp = protect_from_queue (funexp, 0);
301 if (fndecl != 0)
302 /* Get possible static chain value for nested function in C. */
303 static_chain_value = lookup_static_chain (fndecl);
305 /* Make a valid memory address and copy constants through pseudo-regs,
306 but not for a constant address if -fno-function-cse. */
307 if (GET_CODE (funexp) != SYMBOL_REF)
308 /* If we are using registers for parameters, force the
309 function address into a register now. */
310 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
311 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
312 : memory_address (FUNCTION_MODE, funexp));
313 else
315 #ifndef NO_FUNCTION_CSE
316 if (optimize && ! flag_no_function_cse)
317 #ifdef NO_RECURSIVE_FUNCTION_CSE
318 if (fndecl != current_function_decl)
319 #endif
320 funexp = force_reg (Pmode, funexp);
321 #endif
324 if (static_chain_value != 0)
326 emit_move_insn (static_chain_rtx, static_chain_value);
328 if (GET_CODE (static_chain_rtx) == REG)
329 use_reg (call_fusage, static_chain_rtx);
332 return funexp;
335 /* Generate instructions to call function FUNEXP,
336 and optionally pop the results.
337 The CALL_INSN is the first insn generated.
339 FNDECL is the declaration node of the function. This is given to the
340 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
342 FUNTYPE is the data type of the function. This is given to the macro
343 RETURN_POPS_ARGS to determine whether this function pops its own args.
344 We used to allow an identifier for library functions, but that doesn't
345 work when the return type is an aggregate type and the calling convention
346 says that the pointer to this aggregate is to be popped by the callee.
348 STACK_SIZE is the number of bytes of arguments on the stack,
349 rounded up to PREFERRED_STACK_BOUNDARY; zero if the size is variable.
350 This is both to put into the call insn and
351 to generate explicit popping code if necessary.
353 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
354 It is zero if this call doesn't want a structure value.
356 NEXT_ARG_REG is the rtx that results from executing
357 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
358 just after all the args have had their registers assigned.
359 This could be whatever you like, but normally it is the first
360 arg-register beyond those used for args in this call,
361 or 0 if all the arg-registers are used in this call.
362 It is passed on to `gen_call' so you can put this info in the call insn.
364 VALREG is a hard register in which a value is returned,
365 or 0 if the call does not return a value.
367 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
368 the args to this call were processed.
369 We restore `inhibit_defer_pop' to that value.
371 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
372 denote registers used by the called function.
374 IS_CONST is true if this is a `const' call. */
376 static void
377 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
378 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
379 call_fusage, is_const)
380 rtx funexp;
381 tree fndecl ATTRIBUTE_UNUSED;
382 tree funtype ATTRIBUTE_UNUSED;
383 HOST_WIDE_INT stack_size;
384 HOST_WIDE_INT rounded_stack_size;
385 HOST_WIDE_INT struct_value_size;
386 rtx next_arg_reg;
387 rtx valreg;
388 int old_inhibit_defer_pop;
389 rtx call_fusage;
390 int is_const;
392 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
393 rtx struct_value_size_rtx = GEN_INT (struct_value_size);
394 rtx call_insn;
395 #ifndef ACCUMULATE_OUTGOING_ARGS
396 int already_popped = 0;
397 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
398 #endif
400 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
401 and we don't want to load it into a register as an optimization,
402 because prepare_call_address already did it if it should be done. */
403 if (GET_CODE (funexp) != SYMBOL_REF)
404 funexp = memory_address (FUNCTION_MODE, funexp);
406 #ifndef ACCUMULATE_OUTGOING_ARGS
407 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
408 if (HAVE_call_pop && HAVE_call_value_pop && n_popped > 0)
410 rtx n_pop = GEN_INT (n_popped);
411 rtx pat;
413 /* If this subroutine pops its own args, record that in the call insn
414 if possible, for the sake of frame pointer elimination. */
416 if (valreg)
417 pat = gen_call_value_pop (valreg,
418 gen_rtx_MEM (FUNCTION_MODE, funexp),
419 rounded_stack_size_rtx, next_arg_reg, n_pop);
420 else
421 pat = gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, funexp),
422 rounded_stack_size_rtx, next_arg_reg, n_pop);
424 emit_call_insn (pat);
425 already_popped = 1;
427 else
428 #endif
429 #endif
431 #if defined (HAVE_call) && defined (HAVE_call_value)
432 if (HAVE_call && HAVE_call_value)
434 if (valreg)
435 emit_call_insn (gen_call_value (valreg,
436 gen_rtx_MEM (FUNCTION_MODE, funexp),
437 rounded_stack_size_rtx, next_arg_reg,
438 NULL_RTX));
439 else
440 emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, funexp),
441 rounded_stack_size_rtx, next_arg_reg,
442 struct_value_size_rtx));
444 else
445 #endif
446 abort ();
448 /* Find the CALL insn we just emitted. */
449 for (call_insn = get_last_insn ();
450 call_insn && GET_CODE (call_insn) != CALL_INSN;
451 call_insn = PREV_INSN (call_insn))
454 if (! call_insn)
455 abort ();
457 /* Put the register usage information on the CALL. If there is already
458 some usage information, put ours at the end. */
459 if (CALL_INSN_FUNCTION_USAGE (call_insn))
461 rtx link;
463 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
464 link = XEXP (link, 1))
467 XEXP (link, 1) = call_fusage;
469 else
470 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
472 /* If this is a const call, then set the insn's unchanging bit. */
473 if (is_const)
474 CONST_CALL_P (call_insn) = 1;
476 /* Restore this now, so that we do defer pops for this call's args
477 if the context of the call as a whole permits. */
478 inhibit_defer_pop = old_inhibit_defer_pop;
480 #ifndef ACCUMULATE_OUTGOING_ARGS
481 /* If returning from the subroutine does not automatically pop the args,
482 we need an instruction to pop them sooner or later.
483 Perhaps do it now; perhaps just record how much space to pop later.
485 If returning from the subroutine does pop the args, indicate that the
486 stack pointer will be changed. */
488 if (n_popped > 0)
490 if (!already_popped)
491 CALL_INSN_FUNCTION_USAGE (call_insn)
492 = gen_rtx_EXPR_LIST (VOIDmode,
493 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
494 CALL_INSN_FUNCTION_USAGE (call_insn));
495 rounded_stack_size -= n_popped;
496 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
499 if (rounded_stack_size != 0)
501 if (flag_defer_pop && inhibit_defer_pop == 0 && !is_const)
502 pending_stack_adjust += rounded_stack_size;
503 else
504 adjust_stack (rounded_stack_size_rtx);
506 #endif
509 /* Determine if the function identified by NAME and FNDECL is one with
510 special properties we wish to know about.
512 For example, if the function might return more than one time (setjmp), then
513 set RETURNS_TWICE to a nonzero value.
515 Similarly, set IS_LONGJMP if the function is in the longjmp family.
517 Set IS_MALLOC for any of the standard memory allocation functions which
518 allocate from the heap.
520 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
521 space from the stack such as alloca. */
523 static void
524 special_function_p (name, fndecl, returns_twice, is_longjmp,
525 is_malloc, may_be_alloca)
526 char *name;
527 tree fndecl;
528 int *returns_twice;
529 int *is_longjmp;
530 int *is_malloc;
531 int *may_be_alloca;
533 *returns_twice = 0;
534 *is_longjmp = 0;
535 *is_malloc = 0;
536 *may_be_alloca = 0;
538 if (name != 0 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
539 /* Exclude functions not at the file scope, or not `extern',
540 since they are not the magic functions we would otherwise
541 think they are. */
542 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
544 char *tname = name;
546 /* We assume that alloca will always be called by name. It
547 makes no sense to pass it as a pointer-to-function to
548 anything that does not understand its behavior. */
549 *may_be_alloca
550 = (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
551 && name[0] == 'a'
552 && ! strcmp (name, "alloca"))
553 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
554 && name[0] == '_'
555 && ! strcmp (name, "__builtin_alloca"))));
557 /* Disregard prefix _, __ or __x. */
558 if (name[0] == '_')
560 if (name[1] == '_' && name[2] == 'x')
561 tname += 3;
562 else if (name[1] == '_')
563 tname += 2;
564 else
565 tname += 1;
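/* Illustrative examples (library names assumed): "__sigsetjmp" is reduced
   to "sigsetjmp" and "_longjmp" to "longjmp", so the checks below see the
   unprefixed names.  */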
568 if (tname[0] == 's')
570 *returns_twice
571 = ((tname[1] == 'e'
572 && (! strcmp (tname, "setjmp")
573 || ! strcmp (tname, "setjmp_syscall")))
574 || (tname[1] == 'i'
575 && ! strcmp (tname, "sigsetjmp"))
576 || (tname[1] == 'a'
577 && ! strcmp (tname, "savectx")));
578 if (tname[1] == 'i'
579 && ! strcmp (tname, "siglongjmp"))
580 *is_longjmp = 1;
582 else if ((tname[0] == 'q' && tname[1] == 's'
583 && ! strcmp (tname, "qsetjmp"))
584 || (tname[0] == 'v' && tname[1] == 'f'
585 && ! strcmp (tname, "vfork")))
586 *returns_twice = 1;
588 else if (tname[0] == 'l' && tname[1] == 'o'
589 && ! strcmp (tname, "longjmp"))
590 *is_longjmp = 1;
591 /* XXX should have "malloc" attribute on functions instead
592 of recognizing them by name. */
593 else if (! strcmp (tname, "malloc")
594 || ! strcmp (tname, "calloc")
595 || ! strcmp (tname, "realloc")
596 /* Note use of NAME rather than TNAME here. These functions
597 are only reserved when preceded with __. */
598 || ! strcmp (name, "__vn") /* mangled __builtin_vec_new */
599 || ! strcmp (name, "__nw") /* mangled __builtin_new */
600 || ! strcmp (name, "__builtin_new")
601 || ! strcmp (name, "__builtin_vec_new"))
602 *is_malloc = 1;
606 /* Precompute all register parameters as described by ARGS, storing values
607 into fields within the ARGS array.
609 NUM_ACTUALS indicates the total number of elements in the ARGS array.
611 Set REG_PARM_SEEN if we encounter a register parameter. */
613 static void
614 precompute_register_parameters (num_actuals, args, reg_parm_seen)
615 int num_actuals;
616 struct arg_data *args;
617 int *reg_parm_seen;
619 int i;
621 *reg_parm_seen = 0;
623 for (i = 0; i < num_actuals; i++)
624 if (args[i].reg != 0 && ! args[i].pass_on_stack)
626 *reg_parm_seen = 1;
628 if (args[i].value == 0)
630 push_temp_slots ();
631 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
632 VOIDmode, 0);
633 preserve_temp_slots (args[i].value);
634 pop_temp_slots ();
636 /* ANSI doesn't require a sequence point here,
637 but PCC has one, so this will avoid some problems. */
638 emit_queue ();
641 /* If we are to promote the function arg to a wider mode,
642 do it now. */
644 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
645 args[i].value
646 = convert_modes (args[i].mode,
647 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
648 args[i].value, args[i].unsignedp);
650 /* If the value is expensive, and we are inside an appropriately
651 short loop, put the value into a pseudo and then put the pseudo
652 into the hard reg.
654 For small register classes, also do this if this call uses
655 register parameters. This is to avoid reload conflicts while
656 loading the parameter registers. */
658 if ((! (GET_CODE (args[i].value) == REG
659 || (GET_CODE (args[i].value) == SUBREG
660 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
661 && args[i].mode != BLKmode
662 && rtx_cost (args[i].value, SET) > 2
663 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
664 || preserve_subexpressions_p ()))
665 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
669 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
671 /* The argument list is the property of the called routine and it
672 may clobber it. If the fixed area has been used for previous
673 parameters, we must save and restore it. */
674 static rtx
675 save_fixed_argument_area (reg_parm_stack_space, argblock,
676 low_to_save, high_to_save)
677 int reg_parm_stack_space;
678 rtx argblock;
679 int *low_to_save;
680 int *high_to_save;
682 int i;
683 rtx save_area = NULL_RTX;
685 /* Compute the boundary of the area that needs to be saved, if any. */
686 #ifdef ARGS_GROW_DOWNWARD
687 for (i = 0; i < reg_parm_stack_space + 1; i++)
688 #else
689 for (i = 0; i < reg_parm_stack_space; i++)
690 #endif
692 if (i >= highest_outgoing_arg_in_use
693 || stack_usage_map[i] == 0)
694 continue;
696 if (*low_to_save == -1)
697 *low_to_save = i;
699 *high_to_save = i;
702 if (*low_to_save >= 0)
704 int num_to_save = *high_to_save - *low_to_save + 1;
705 enum machine_mode save_mode
706 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
707 rtx stack_area;
709 /* If we don't have the required alignment, we must do this in BLKmode. */
710 if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
711 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
712 save_mode = BLKmode;
714 #ifdef ARGS_GROW_DOWNWARD
715 stack_area = gen_rtx_MEM (save_mode,
716 memory_address (save_mode,
717 plus_constant (argblock,
718 - *high_to_save)));
719 #else
720 stack_area = gen_rtx_MEM (save_mode,
721 memory_address (save_mode,
722 plus_constant (argblock,
723 *low_to_save)));
724 #endif
725 if (save_mode == BLKmode)
727 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
728 emit_block_move (validize_mem (save_area), stack_area,
729 GEN_INT (num_to_save),
730 PARM_BOUNDARY / BITS_PER_UNIT);
732 else
734 save_area = gen_reg_rtx (save_mode);
735 emit_move_insn (save_area, stack_area);
738 return save_area;
741 static void
742 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
743 rtx save_area;
744 rtx argblock;
745 int high_to_save;
746 int low_to_save;
748 enum machine_mode save_mode = GET_MODE (save_area);
749 #ifdef ARGS_GROW_DOWNWARD
750 rtx stack_area
751 = gen_rtx_MEM (save_mode,
752 memory_address (save_mode,
753 plus_constant (argblock,
754 - high_to_save)));
755 #else
756 rtx stack_area
757 = gen_rtx_MEM (save_mode,
758 memory_address (save_mode,
759 plus_constant (argblock,
760 low_to_save)));
761 #endif
763 if (save_mode != BLKmode)
764 emit_move_insn (stack_area, save_area);
765 else
766 emit_block_move (stack_area, validize_mem (save_area),
767 GEN_INT (high_to_save - low_to_save + 1),
768 PARM_BOUNDARY / BITS_PER_UNIT);
770 #endif
772 /* If any elements in ARGS refer to parameters that are to be passed in
773 registers, but not in memory, and whose alignment does not permit a
774 direct copy into registers, copy the values into a group of pseudos
775 which we will later copy into the appropriate hard registers.
777 Pseudos for each unaligned argument will be stored into the array
778 args[argnum].aligned_regs. The caller is responsible for deallocating
779 the aligned_regs array if it is nonzero. */
781 static void
782 store_unaligned_arguments_into_pseudos (args, num_actuals)
783 struct arg_data *args;
784 int num_actuals;
786 int i, j;
788 for (i = 0; i < num_actuals; i++)
789 if (args[i].reg != 0 && ! args[i].pass_on_stack
790 && args[i].mode == BLKmode
791 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
792 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
794 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
795 int big_endian_correction = 0;
797 args[i].n_aligned_regs
798 = args[i].partial ? args[i].partial
799 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
801 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
802 * args[i].n_aligned_regs);
804 /* Structures smaller than a word are aligned to the least
805 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
806 this means we must skip the empty high order bytes when
807 calculating the bit offset. */
808 if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
809 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
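/* Worked example (hypothetical target): for a 3-byte structure with
   BITS_PER_WORD == 32, big_endian_correction is 32 - 24 == 8, so the
   24 data bits land in the least significant bytes of the word.  */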
811 for (j = 0; j < args[i].n_aligned_regs; j++)
813 rtx reg = gen_reg_rtx (word_mode);
814 rtx word = operand_subword_force (args[i].value, j, BLKmode);
815 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
816 int bitalign = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
818 args[i].aligned_regs[j] = reg;
820 /* There is no need to restrict this code to loading items
821 in TYPE_ALIGN sized hunks. The bitfield instructions can
822 load up entire word sized registers efficiently.
824 ??? This may not be needed anymore.
825 We used to emit a clobber here but that doesn't let later
826 passes optimize the instructions we emit. By storing 0 into
827 the register, later passes know the first AND to zero out the
828 bitfield being set in the register is unnecessary. The store
829 of 0 will be deleted as will at least the first AND. */
831 emit_move_insn (reg, const0_rtx);
833 bytes -= bitsize / BITS_PER_UNIT;
834 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
835 extract_bit_field (word, bitsize, 0, 1,
836 NULL_RTX, word_mode,
837 word_mode,
838 bitalign / BITS_PER_UNIT,
839 BITS_PER_WORD),
840 bitalign / BITS_PER_UNIT, BITS_PER_WORD);
845 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
846 ACTPARMS.
848 NUM_ACTUALS is the total number of parameters.
850 N_NAMED_ARGS is the total number of named arguments.
852 FNDECL is the tree node for the target of this call (if known).
854 ARGS_SO_FAR holds state needed by the target to know where to place
855 the next argument.
857 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
858 for arguments which are passed in registers.
860 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
861 and may be modified by this routine.
863 OLD_PENDING_ADJ, MUST_PREALLOCATE and IS_CONST are pointers to integer
864 flags which may be modified by this routine. */
866 static void
867 initialize_argument_information (num_actuals, args, args_size, n_named_args,
868 actparms, fndecl, args_so_far,
869 reg_parm_stack_space, old_stack_level,
870 old_pending_adj, must_preallocate, is_const)
871 int num_actuals ATTRIBUTE_UNUSED;
872 struct arg_data *args;
873 struct args_size *args_size;
874 int n_named_args ATTRIBUTE_UNUSED;
875 tree actparms;
876 tree fndecl;
877 CUMULATIVE_ARGS *args_so_far;
878 int reg_parm_stack_space;
879 rtx *old_stack_level;
880 int *old_pending_adj;
881 int *must_preallocate;
882 int *is_const;
884 /* 1 if scanning parms front to back, -1 if scanning back to front. */
885 int inc;
887 /* Count arg position in order args appear. */
888 int argpos;
890 int i;
891 tree p;
893 args_size->constant = 0;
894 args_size->var = 0;
896 /* In this loop, we consider args in the order they are written.
897 We fill up ARGS from the front or from the back if necessary
898 so that in any case the first arg to be pushed ends up at the front. */
900 #ifdef PUSH_ARGS_REVERSED
901 i = num_actuals - 1, inc = -1;
902 /* In this case, must reverse order of args
903 so that we compute and push the last arg first. */
904 #else
905 i = 0, inc = 1;
906 #endif
908 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
909 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
911 tree type = TREE_TYPE (TREE_VALUE (p));
912 int unsignedp;
913 enum machine_mode mode;
915 args[i].tree_value = TREE_VALUE (p);
917 /* Replace erroneous argument with constant zero. */
918 if (type == error_mark_node || TYPE_SIZE (type) == 0)
919 args[i].tree_value = integer_zero_node, type = integer_type_node;
921 /* If TYPE is a transparent union, pass things the way we would
922 pass the first field of the union. We have already verified that
923 the modes are the same. */
924 if (TYPE_TRANSPARENT_UNION (type))
925 type = TREE_TYPE (TYPE_FIELDS (type));
927 /* Decide where to pass this arg.
929 args[i].reg is nonzero if all or part is passed in registers.
931 args[i].partial is nonzero if part but not all is passed in registers,
932 and the exact value says how many words are passed in registers.
934 args[i].pass_on_stack is nonzero if the argument must at least be
935 computed on the stack. It may then be loaded back into registers
936 if args[i].reg is nonzero.
938 These decisions are driven by the FUNCTION_... macros and must agree
939 with those made by function.c. */
941 /* See if this argument should be passed by invisible reference. */
942 if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
943 && contains_placeholder_p (TYPE_SIZE (type)))
944 || TREE_ADDRESSABLE (type)
945 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
946 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
947 type, argpos < n_named_args)
948 #endif
951 /* If we're compiling a thunk, pass through invisible
952 references instead of making a copy. */
953 if (current_function_is_thunk
954 #ifdef FUNCTION_ARG_CALLEE_COPIES
955 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
956 type, argpos < n_named_args)
957 /* If it's in a register, we must make a copy of it too. */
958 /* ??? Is this a sufficient test? Is there a better one? */
959 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
960 && REG_P (DECL_RTL (args[i].tree_value)))
961 && ! TREE_ADDRESSABLE (type))
962 #endif
965 /* C++ uses a TARGET_EXPR to indicate that we want to make a
966 new object from the argument. If we are passing by
967 invisible reference, the callee will do that for us, so we
968 can strip off the TARGET_EXPR. This is not always safe,
969 but it is safe in the only case where this is a useful
970 optimization; namely, when the argument is a plain object.
971 In that case, the frontend is just asking the backend to
972 make a bitwise copy of the argument. */
974 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
975 && (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND
976 (args[i].tree_value, 1)))
977 == 'd')
978 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
979 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
981 args[i].tree_value = build1 (ADDR_EXPR,
982 build_pointer_type (type),
983 args[i].tree_value);
984 type = build_pointer_type (type);
986 else
988 /* We make a copy of the object and pass the address to the
989 function being called. */
990 rtx copy;
992 if (TYPE_SIZE (type) == 0
993 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
994 || (flag_stack_check && ! STACK_CHECK_BUILTIN
995 && (TREE_INT_CST_HIGH (TYPE_SIZE (type)) != 0
996 || (TREE_INT_CST_LOW (TYPE_SIZE (type))
997 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
999 /* This is a variable-sized object. Make space on the stack
1000 for it. */
1001 rtx size_rtx = expr_size (TREE_VALUE (p));
1003 if (*old_stack_level == 0)
1005 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1006 *old_pending_adj = pending_stack_adjust;
1007 pending_stack_adjust = 0;
1010 copy = gen_rtx_MEM (BLKmode,
1011 allocate_dynamic_stack_space (size_rtx,
1012 NULL_RTX,
1013 TYPE_ALIGN (type)));
1015 else
1017 int size = int_size_in_bytes (type);
1018 copy = assign_stack_temp (TYPE_MODE (type), size, 0);
1021 MEM_SET_IN_STRUCT_P (copy, AGGREGATE_TYPE_P (type));
1023 store_expr (args[i].tree_value, copy, 0);
1024 *is_const = 0;
1026 args[i].tree_value = build1 (ADDR_EXPR,
1027 build_pointer_type (type),
1028 make_tree (type, copy));
1029 type = build_pointer_type (type);
1033 mode = TYPE_MODE (type);
1034 unsignedp = TREE_UNSIGNED (type);
1036 #ifdef PROMOTE_FUNCTION_ARGS
1037 mode = promote_mode (type, mode, &unsignedp, 1);
1038 #endif
1040 args[i].unsignedp = unsignedp;
1041 args[i].mode = mode;
1042 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1043 argpos < n_named_args);
1044 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1045 if (args[i].reg)
1046 args[i].partial
1047 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1048 argpos < n_named_args);
1049 #endif
1051 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1053 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1054 it means that we are to pass this arg in the register(s) designated
1055 by the PARALLEL, but also to pass it in the stack. */
1056 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1057 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1058 args[i].pass_on_stack = 1;
1060 /* If this is an addressable type, we must preallocate the stack
1061 since we must evaluate the object into its final location.
1063 If this is to be passed in both registers and the stack, it is simpler
1064 to preallocate. */
1065 if (TREE_ADDRESSABLE (type)
1066 || (args[i].pass_on_stack && args[i].reg != 0))
1067 *must_preallocate = 1;
1069 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1070 we cannot consider this function call constant. */
1071 if (TREE_ADDRESSABLE (type))
1072 *is_const = 0;
1074 /* Compute the stack-size of this argument. */
1075 if (args[i].reg == 0 || args[i].partial != 0
1076 || reg_parm_stack_space > 0
1077 || args[i].pass_on_stack)
1078 locate_and_pad_parm (mode, type,
1079 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1081 #else
1082 args[i].reg != 0,
1083 #endif
1084 fndecl, args_size, &args[i].offset,
1085 &args[i].size);
1087 #ifndef ARGS_GROW_DOWNWARD
1088 args[i].slot_offset = *args_size;
1089 #endif
1091 /* If a part of the arg was put into registers,
1092 don't include that part in the amount pushed. */
1093 if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1094 args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1095 / (PARM_BOUNDARY / BITS_PER_UNIT)
1096 * (PARM_BOUNDARY / BITS_PER_UNIT));
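/* Worked example (hypothetical target): with args[i].partial == 1,
   UNITS_PER_WORD == 4 and PARM_BOUNDARY == 32, the 4 bytes already
   passed in a register are subtracted from the stack size computed
   by locate_and_pad_parm above.  */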
1098 /* Update ARGS_SIZE, the total stack space for args so far. */
1100 args_size->constant += args[i].size.constant;
1101 if (args[i].size.var)
1103 ADD_PARM_SIZE (*args_size, args[i].size.var);
1106 /* Since the slot offset points to the bottom of the slot,
1107 we must record it after incrementing if the args grow down. */
1108 #ifdef ARGS_GROW_DOWNWARD
1109 args[i].slot_offset = *args_size;
1111 args[i].slot_offset.constant = -args_size->constant;
1112 if (args_size->var)
1114 SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1116 #endif
1118 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1119 have been used, etc. */
1121 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1122 argpos < n_named_args);
1126 /* Update ARGS_SIZE to contain the total size for the argument block.
1127 Return the original constant component of the argument block's size.
1129 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1130 for arguments passed in registers. */
1132 static int
1133 compute_argument_block_size (reg_parm_stack_space, args_size)
1134 int reg_parm_stack_space;
1135 struct args_size *args_size;
1137 int unadjusted_args_size = args_size->constant;
1139 /* Compute the actual size of the argument block required. The variable
1140 and constant sizes must be combined, the size may have to be rounded,
1141 and there may be a minimum required size. */
1143 if (args_size->var)
1145 args_size->var = ARGS_SIZE_TREE (*args_size);
1146 args_size->constant = 0;
1148 #ifdef PREFERRED_STACK_BOUNDARY
1149 if (PREFERRED_STACK_BOUNDARY != BITS_PER_UNIT)
1150 args_size->var = round_up (args_size->var, STACK_BYTES);
1151 #endif
1153 if (reg_parm_stack_space > 0)
1155 args_size->var
1156 = size_binop (MAX_EXPR, args_size->var,
1157 size_int (reg_parm_stack_space));
1159 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1160 /* The area corresponding to register parameters is not to count in
1161 the size of the block we need. So make the adjustment. */
1162 args_size->var
1163 = size_binop (MINUS_EXPR, args_size->var,
1164 size_int (reg_parm_stack_space));
1165 #endif
1168 else
1170 #ifdef PREFERRED_STACK_BOUNDARY
1171 args_size->constant = (((args_size->constant
1172 + pending_stack_adjust
1173 + STACK_BYTES - 1)
1174 / STACK_BYTES * STACK_BYTES)
1175 - pending_stack_adjust);
1176 #endif
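/* Worked example (hypothetical values): with args_size->constant == 14,
   pending_stack_adjust == 0 and STACK_BYTES == 8, the constant size is
   rounded up to 16 so the stack stays aligned to PREFERRED_STACK_BOUNDARY
   after the arguments are pushed.  */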
1178 args_size->constant = MAX (args_size->constant,
1179 reg_parm_stack_space);
1181 #ifdef MAYBE_REG_PARM_STACK_SPACE
1182 if (reg_parm_stack_space == 0)
1183 args_size->constant = 0;
1184 #endif
1186 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1187 args_size->constant -= reg_parm_stack_space;
1188 #endif
1190 return unadjusted_args_size;
1193 /* Precompute parameters as needed for a function call.
1195 IS_CONST indicates the target function is a pure function.
1197 MUST_PREALLOCATE indicates that we must preallocate stack space for
1198 any stack arguments.
1200 NUM_ACTUALS is the number of arguments.
1202 ARGS is an array containing information for each argument; this routine
1203 fills in the INITIAL_VALUE and VALUE fields for each precomputed argument.
1205 ARGS_SIZE contains information about the size of the arg list. */
1207 static void
1208 precompute_arguments (is_const, must_preallocate, num_actuals, args, args_size)
1209 int is_const;
1210 int must_preallocate;
1211 int num_actuals;
1212 struct arg_data *args;
1213 struct args_size *args_size;
1215 int i;
1217 /* If this function call is cse'able, precompute all the parameters.
1218 Note that if the parameter is constructed into a temporary, this will
1219 cause an additional copy because the parameter will be constructed
1220 into a temporary location and then copied into the outgoing arguments.
1221 If a parameter contains a call to alloca and this function uses the
1222 stack, precompute the parameter. */
1224 /* If we preallocated the stack space, and some arguments must be passed
1225 on the stack, then we must precompute any parameter which contains a
1226 function call which will store arguments on the stack.
1227 Otherwise, evaluating the parameter may clobber previous parameters
1228 which have already been stored into the stack. */
1230 for (i = 0; i < num_actuals; i++)
1231 if (is_const
1232 || ((args_size->var != 0 || args_size->constant != 0)
1233 && calls_function (args[i].tree_value, 1))
1234 || (must_preallocate
1235 && (args_size->var != 0 || args_size->constant != 0)
1236 && calls_function (args[i].tree_value, 0)))
1238 /* If this is an addressable type, we cannot pre-evaluate it. */
1239 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1240 abort ();
1242 push_temp_slots ();
1244 args[i].initial_value = args[i].value
1245 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1247 preserve_temp_slots (args[i].value);
1248 pop_temp_slots ();
1250 /* ANSI doesn't require a sequence point here,
1251 but PCC has one, so this will avoid some problems. */
1252 emit_queue ();
1254 args[i].initial_value = args[i].value
1255 = protect_from_queue (args[i].initial_value, 0);
1257 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) != args[i].mode)
1258 args[i].value
1259 = convert_modes (args[i].mode,
1260 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
1261 args[i].value, args[i].unsignedp);
1265 /* Given the current state of MUST_PREALLOCATE and information about
1266 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1267 compute and return the final value for MUST_PREALLOCATE. */
1269 static int
1270 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1271 int must_preallocate;
1272 int num_actuals;
1273 struct arg_data *args;
1274 struct args_size *args_size;
1276 /* See if we have or want to preallocate stack space.
1278 If we would have to push a partially-in-regs parm
1279 before other stack parms, preallocate stack space instead.
1281 If the size of some parm is not a multiple of the required stack
1282 alignment, we must preallocate.
1284 If the total size of arguments that would otherwise create a copy in
1285 a temporary (such as a CALL) is more than half the total argument list
1286 size, preallocation is faster.
1288 Another reason to preallocate is if we have a machine (like the m88k)
1289 where stack alignment is required to be maintained between every
1290 pair of insns, not just when the call is made. However, we assume here
1291 that such machines either do not have push insns (and hence preallocation
1292 would occur anyway) or the problem is taken care of with
1293 PUSH_ROUNDING. */
1295 if (! must_preallocate)
1297 int partial_seen = 0;
1298 int copy_to_evaluate_size = 0;
1299 int i;
1301 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1303 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1304 partial_seen = 1;
1305 else if (partial_seen && args[i].reg == 0)
1306 must_preallocate = 1;
1308 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1309 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1310 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1311 || TREE_CODE (args[i].tree_value) == COND_EXPR
1312 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1313 copy_to_evaluate_size
1314 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1317 if (copy_to_evaluate_size * 2 >= args_size->constant
1318 && args_size->constant > 0)
1319 must_preallocate = 1;
1321 return must_preallocate;
1324 /* If we preallocated stack space, compute the address of each argument
1325 and store it into the ARGS array.
1327 We need not ensure it is a valid memory address here; it will be
1328 validized when it is used.
1330 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1332 static void
1333 compute_argument_addresses (args, argblock, num_actuals)
1334 struct arg_data *args;
1335 rtx argblock;
1336 int num_actuals;
1338 if (argblock)
1340 rtx arg_reg = argblock;
1341 int i, arg_offset = 0;
1343 if (GET_CODE (argblock) == PLUS)
1344 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1346 for (i = 0; i < num_actuals; i++)
1348 rtx offset = ARGS_SIZE_RTX (args[i].offset);
1349 rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1350 rtx addr;
1352 /* Skip this parm if it will not be passed on the stack. */
1353 if (! args[i].pass_on_stack && args[i].reg != 0)
1354 continue;
1356 if (GET_CODE (offset) == CONST_INT)
1357 addr = plus_constant (arg_reg, INTVAL (offset));
1358 else
1359 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1361 addr = plus_constant (addr, arg_offset);
1362 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1363 MEM_SET_IN_STRUCT_P
1364 (args[i].stack,
1365 AGGREGATE_TYPE_P (TREE_TYPE (args[i].tree_value)));
1367 if (GET_CODE (slot_offset) == CONST_INT)
1368 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1369 else
1370 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1372 addr = plus_constant (addr, arg_offset);
1373 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1378 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1379 in a call instruction.
1381 FNDECL is the tree node for the target function. For an indirect call
1382 FNDECL will be NULL_TREE.
1384 EXP is the CALL_EXPR for this call. */
1386 static rtx
1387 rtx_for_function_call (fndecl, exp)
1388 tree fndecl;
1389 tree exp;
1391 rtx funexp;
1393 /* Get the function to call, in the form of RTL. */
1394 if (fndecl)
1396 /* If this is the first use of the function, see if we need to
1397 make an external definition for it. */
1398 if (! TREE_USED (fndecl))
1400 assemble_external (fndecl);
1401 TREE_USED (fndecl) = 1;
1404 /* Get a SYMBOL_REF rtx for the function address. */
1405 funexp = XEXP (DECL_RTL (fndecl), 0);
1407 else
1408 /* Generate an rtx (probably a pseudo-register) for the address. */
1410 rtx funaddr;
1411 push_temp_slots ();
1412 funaddr = funexp =
1413 expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
1414 pop_temp_slots (); /* FUNEXP can't be BLKmode */
1416 /* Check the function is executable. */
1417 if (current_function_check_memory_usage)
1419 #ifdef POINTERS_EXTEND_UNSIGNED
1420 /* It might be OK to convert funexp in place, but there's
1421 a lot going on between here and when it happens naturally
1422 that this seems safer. */
1423 funaddr = convert_memory_address (Pmode, funexp);
1424 #endif
1425 emit_library_call (chkr_check_exec_libfunc, 1,
1426 VOIDmode, 1,
1427 funaddr, Pmode);
1429 emit_queue ();
1431 return funexp;
1434 /* Do the register loads required for any wholly-register parms or any
1435 parms which are passed both on the stack and in a register. Their
1436 expressions were already evaluated.
1438 Mark all register-parms as living through the call, putting these USE
1439 insns in the CALL_INSN_FUNCTION_USAGE field. */
1441 static void
1442 load_register_parameters (args, num_actuals, call_fusage)
1443 struct arg_data *args;
1444 int num_actuals;
1445 rtx *call_fusage;
1447 int i, j;
1449 #ifdef LOAD_ARGS_REVERSED
1450 for (i = num_actuals - 1; i >= 0; i--)
1451 #else
1452 for (i = 0; i < num_actuals; i++)
1453 #endif
1455 rtx reg = args[i].reg;
1456 int partial = args[i].partial;
1457 int nregs;
1459 if (reg)
1461 /* Set to non-negative if we must move a word at a time, even if just
1462 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1463 we just use a normal move insn. This value can be zero if the
1464 argument is a zero size structure with no fields. */
1465 nregs = (partial ? partial
1466 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1467 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1468 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1469 : -1));
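/* Illustrative example (hypothetical sizes): a 10-byte BLKmode argument
   with UNITS_PER_WORD == 4 gives nregs == 3, while an argument with
   partial == 2 gives nregs == 2.  */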
1471 /* Handle calls that pass values in multiple non-contiguous
1472 locations. The Irix 6 ABI has examples of this. */
1474 if (GET_CODE (reg) == PARALLEL)
1476 emit_group_load (reg, args[i].value,
1477 int_size_in_bytes (TREE_TYPE (args[i].tree_value)),
1478 (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1479 / BITS_PER_UNIT));
1482 /* If simple case, just do move. If normal partial, store_one_arg
1483 has already loaded the register for us. In all other cases,
1484 load the register(s) from memory. */
1486 else if (nregs == -1)
1487 emit_move_insn (reg, args[i].value);
1489 /* If we have pre-computed the values to put in the registers in
1490 the case of non-aligned structures, copy them in now. */
1492 else if (args[i].n_aligned_regs != 0)
1493 for (j = 0; j < args[i].n_aligned_regs; j++)
1494 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1495 args[i].aligned_regs[j]);
1497 else if (partial == 0 || args[i].pass_on_stack)
1498 move_block_to_reg (REGNO (reg),
1499 validize_mem (args[i].value), nregs,
1500 args[i].mode);
1502 /* Handle calls that pass values in multiple non-contiguous
1503 locations. The Irix 6 ABI has examples of this. */
1504 if (GET_CODE (reg) == PARALLEL)
1505 use_group_regs (call_fusage, reg);
1506 else if (nregs == -1)
1507 use_reg (call_fusage, reg);
1508 else
1509 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1514 /* Generate all the code for a function call
1515 and return an rtx for its value.
1516 Store the value in TARGET (specified as an rtx) if convenient.
1517 If the value is stored in TARGET then TARGET is returned.
1518 If IGNORE is nonzero, then we ignore the value of the function call. */
1521 expand_call (exp, target, ignore)
1522 tree exp;
1523 rtx target;
1524 int ignore;
1526 /* List of actual parameters. */
1527 tree actparms = TREE_OPERAND (exp, 1);
1528 /* RTX for the function to be called. */
1529 rtx funexp;
1530 /* Data type of the function. */
1531 tree funtype;
1532 /* Declaration of the function being called,
1533 or 0 if the function is computed (not known by name). */
1534 tree fndecl = 0;
1535 char *name = 0;
1537 /* Register in which non-BLKmode value will be returned,
1538 or 0 if no value or if value is BLKmode. */
1539 rtx valreg;
1540 /* Address where we should return a BLKmode value;
1541 0 if value not BLKmode. */
1542 rtx structure_value_addr = 0;
1543 /* Nonzero if that address is being passed by treating it as
1544 an extra, implicit first parameter. Otherwise,
1545 it is passed by being copied directly into struct_value_rtx. */
1546 int structure_value_addr_parm = 0;
1547 /* Size of aggregate value wanted, or zero if none wanted
1548 or if we are using the non-reentrant PCC calling convention
1549 or expecting the value in registers. */
1550 HOST_WIDE_INT struct_value_size = 0;
1551 /* Nonzero if called function returns an aggregate in memory PCC style,
1552 by returning the address of where to find it. */
1553 int pcc_struct_value = 0;
1555 /* Number of actual parameters in this call, including struct value addr. */
1556 int num_actuals;
1557 /* Number of named args. Args after this are anonymous ones
1558 and they must all go on the stack. */
1559 int n_named_args;
1561 /* Vector of information about each argument.
1562 Arguments are numbered in the order they will be pushed,
1563 not the order they are written. */
1564 struct arg_data *args;
1566 /* Total size in bytes of all the stack-parms scanned so far. */
1567 struct args_size args_size;
1568 /* Size of arguments before any adjustments (such as rounding). */
1569 int unadjusted_args_size;
1570 /* Data on reg parms scanned so far. */
1571 CUMULATIVE_ARGS args_so_far;
1572 /* Nonzero if a reg parm has been scanned. */
1573 int reg_parm_seen;
1574 /* Nonzero if this is an indirect function call. */
1576 /* Nonzero if we must avoid push-insns in the args for this call.
1577 If stack space is allocated for register parameters, but not by the
1578 caller, then it is preallocated in the fixed part of the stack frame.
1579 So the entire argument block must then be preallocated (i.e., we
1580 ignore PUSH_ROUNDING in that case). */
1582 #ifdef PUSH_ROUNDING
1583 int must_preallocate = 0;
1584 #else
1585 int must_preallocate = 1;
1586 #endif
1588 /* Size of the stack reserved for parameter registers. */
1589 int reg_parm_stack_space = 0;
1591 /* Address of space preallocated for stack parms
1592 (on machines that lack push insns), or 0 if space not preallocated. */
1593 rtx argblock = 0;
1595 /* Nonzero if it is plausible that this is a call to alloca. */
1596 int may_be_alloca;
1597 /* Nonzero if this is a call to malloc or a related function. */
1598 int is_malloc;
1599 /* Nonzero if this is a call to setjmp or a related function. */
1600 int returns_twice;
1601 /* Nonzero if this is a call to `longjmp'. */
1602 int is_longjmp;
1603 /* Nonzero if this is a call to an inline function. */
1604 int is_integrable = 0;
1605 /* Nonzero if this is a call to a `const' function.
1606 Note that only explicitly named functions are handled as `const' here. */
1607 int is_const = 0;
1608 /* Nonzero if this is a call to a `volatile' function. */
1609 int is_volatile = 0;
1610 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
1611 /* Define the boundary of the register parm stack space that needs to be
1612 saved, if any. */
1613 int low_to_save = -1, high_to_save;
1614 rtx save_area = 0; /* Place that it is saved */
1615 #endif
1617 #ifdef ACCUMULATE_OUTGOING_ARGS
1618 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1619 char *initial_stack_usage_map = stack_usage_map;
1620 int old_stack_arg_under_construction;
1621 #endif
1623 rtx old_stack_level = 0;
1624 int old_pending_adj = 0;
1625 int old_inhibit_defer_pop = inhibit_defer_pop;
1626 rtx call_fusage = 0;
1627 register tree p;
1628 register int i;
1630 /* The value of the function call can be put in a hard register. But
1631 if -fcheck-memory-usage, code which invokes functions (and thus
1632 damages some hard registers) can be inserted before using the value.
1633 So, target is always a pseudo-register in that case. */
1634 if (current_function_check_memory_usage)
1635 target = 0;
1637 /* See if we can find a DECL-node for the actual function.
1638 As a result, decide whether this is a call to an integrable function. */
1640 p = TREE_OPERAND (exp, 0);
1641 if (TREE_CODE (p) == ADDR_EXPR)
1643 fndecl = TREE_OPERAND (p, 0);
1644 if (TREE_CODE (fndecl) != FUNCTION_DECL)
1645 fndecl = 0;
1646 else
1648 if (!flag_no_inline
1649 && fndecl != current_function_decl
1650 && DECL_INLINE (fndecl)
1651 && DECL_SAVED_INSNS (fndecl)
1652 && RTX_INTEGRATED_P (DECL_SAVED_INSNS (fndecl)))
1653 is_integrable = 1;
1654 else if (! TREE_ADDRESSABLE (fndecl))
1656 /* In case this function later becomes inlinable,
1657 record that there was already a non-inline call to it.
1659 Use abstraction instead of setting TREE_ADDRESSABLE
1660 directly. */
1661 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1662 && optimize > 0)
1664 warning_with_decl (fndecl, "can't inline call to `%s'");
1665 warning ("called from here");
1667 mark_addressable (fndecl);
1670 if (TREE_READONLY (fndecl) && ! TREE_THIS_VOLATILE (fndecl)
1671 && TYPE_MODE (TREE_TYPE (exp)) != VOIDmode)
1672 is_const = 1;
1674 if (TREE_THIS_VOLATILE (fndecl))
1675 is_volatile = 1;
1679 /* If we don't have a specific function to call, see if we have a
1680 constant or `noreturn' function from the type. */
1681 if (fndecl == 0)
1683 is_const = TREE_READONLY (TREE_TYPE (TREE_TYPE (p)));
1684 is_volatile = TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (p)));
1687 #ifdef REG_PARM_STACK_SPACE
1688 #ifdef MAYBE_REG_PARM_STACK_SPACE
1689 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1690 #else
1691 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1692 #endif
1693 #endif
1695 #if defined(PUSH_ROUNDING) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1696 if (reg_parm_stack_space > 0)
1697 must_preallocate = 1;
1698 #endif
1700 /* Warn if this value is an aggregate type,
1701 regardless of which calling convention we are using for it. */
1702 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1703 warning ("function call has aggregate value");
1705 /* Set up a place to return a structure. */
1707 /* Cater to broken compilers. */
1708 if (aggregate_value_p (exp))
1710 /* This call returns a big structure. */
1711 is_const = 0;
1713 #ifdef PCC_STATIC_STRUCT_RETURN
1715 pcc_struct_value = 1;
1716 /* Easier than making that case work right. */
1717 if (is_integrable)
1719 /* In case this is a static function, note that it has been
1720 used. */
1721 if (! TREE_ADDRESSABLE (fndecl))
1722 mark_addressable (fndecl);
1723 is_integrable = 0;
1726 #else /* not PCC_STATIC_STRUCT_RETURN */
1728 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1730 if (target && GET_CODE (target) == MEM)
1731 structure_value_addr = XEXP (target, 0);
1732 else
1734 /* Assign a temporary to hold the value. */
1735 tree d;
1737 /* For variable-sized objects, we must be called with a target
1738 specified. If we were to allocate space on the stack here,
1739 we would have no way of knowing when to free it. */
1741 if (struct_value_size < 0)
1742 abort ();
1744 /* This DECL is just something to feed to mark_addressable;
1745 it doesn't get pushed. */
1746 d = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
1747 DECL_RTL (d) = assign_temp (TREE_TYPE (exp), 1, 0, 1);
1748 mark_addressable (d);
1749 structure_value_addr = XEXP (DECL_RTL (d), 0);
1750 TREE_USED (d) = 1;
1751 target = 0;
1754 #endif /* not PCC_STATIC_STRUCT_RETURN */
1757 /* If called function is inline, try to integrate it. */
1759 if (is_integrable)
1761 rtx temp;
1762 #ifdef ACCUMULATE_OUTGOING_ARGS
1763 rtx before_call = get_last_insn ();
1764 #endif
1766 temp = expand_inline_function (fndecl, actparms, target,
1767 ignore, TREE_TYPE (exp),
1768 structure_value_addr);
1770 /* If inlining succeeded, return. */
1771 if (temp != (rtx) (HOST_WIDE_INT) -1)
1773 #ifdef ACCUMULATE_OUTGOING_ARGS
1774 /* If the outgoing argument list must be preserved, push
1775 the stack before executing the inlined function if it
1776 makes any calls. */
1778 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1779 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1780 break;
1782 if (stack_arg_under_construction || i >= 0)
1784 rtx first_insn
1785 = before_call ? NEXT_INSN (before_call) : get_insns ();
1786 rtx insn, seq;
1788 /* Look for a call in the inline function code.
1789 If OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) is
1790 nonzero then there is a call and it is not necessary
1791 to scan the insns. */
1793 if (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl)) == 0)
1794 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1795 if (GET_CODE (insn) == CALL_INSN)
1796 break;
1798 if (insn)
1800 /* Reserve enough stack space so that the largest
1801 argument list of any function call in the inline
1802 function does not overlap the argument list being
1803 evaluated. This is usually an overestimate because
1804 allocate_dynamic_stack_space reserves space for an
1805 outgoing argument list in addition to the requested
1806 space, but there is no way to ask for stack space such
1807 that an argument list of a certain length can be
1808 safely constructed.
1810 Add the stack space reserved for register arguments, if
1811 any, in the inline function. What is really needed is the
1812 largest value of reg_parm_stack_space in the inline
1813 function, but that is not available. Using the current
1814 value of reg_parm_stack_space is wrong, but gives
1815 correct results on all supported machines. */
1817 int adjust = (OUTGOING_ARGS_SIZE (DECL_SAVED_INSNS (fndecl))
1818 + reg_parm_stack_space);
1820 start_sequence ();
1821 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1822 allocate_dynamic_stack_space (GEN_INT (adjust),
1823 NULL_RTX, BITS_PER_UNIT);
1824 seq = get_insns ();
1825 end_sequence ();
1826 emit_insns_before (seq, first_insn);
1827 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1830 #endif
1832 /* If the result is equivalent to TARGET, return TARGET to simplify
1833 checks in store_expr. They can be equivalent but not equal in the
1834 case of a function that returns BLKmode. */
1835 if (temp != target && rtx_equal_p (temp, target))
1836 return target;
1837 return temp;
1840 /* If inlining failed, mark FNDECL as needing to be compiled
1841 separately after all. If function was declared inline,
1842 give a warning. */
1843 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1844 && optimize > 0 && ! TREE_ADDRESSABLE (fndecl))
1846 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1847 warning ("called from here");
1849 mark_addressable (fndecl);
1852 function_call_count++;
1854 if (fndecl && DECL_NAME (fndecl))
1855 name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
1857 /* See if this is a call to a function that can return more than once
1858 or a call to longjmp or malloc. */
1859 special_function_p (name, fndecl, &returns_twice, &is_longjmp,
1860 &is_malloc, &may_be_alloca);
1862 if (may_be_alloca)
1863 current_function_calls_alloca = 1;
1865 /* Operand 0 is a pointer-to-function; get the type of the function. */
1866 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
1867 if (! POINTER_TYPE_P (funtype))
1868 abort ();
1869 funtype = TREE_TYPE (funtype);
1871 /* When calling a const function, we must pop the stack args right away,
1872 so that the pop is deleted or moved with the call. */
1873 if (is_const)
1874 NO_DEFER_POP;
1876 /* Don't let pending stack adjusts add up to too much.
1877 Also, do all pending adjustments now
1878 if there is any chance this might be a call to alloca. */
1880 if (pending_stack_adjust >= 32
1881 || (pending_stack_adjust > 0 && may_be_alloca))
1882 do_pending_stack_adjust ();
1884 /* Push the temporary stack slot level so that we can free any temporaries
1885 we make. */
1886 push_temp_slots ();
1888 /* Start updating where the next arg would go.
1890 On some machines (such as the PA) indirect calls have a different
1891 calling convention than normal calls. The last argument in
1892 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
1893 or not. */
1894 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
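/* ARGS_SO_FAR now holds the cumulative argument-scan state; it is advanced
   with FUNCTION_ARG_ADVANCE as each actual argument is examined, so the
   FUNCTION_ARG queries made later see where the next argument goes.  */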
1896 /* If struct_value_rtx is 0, it means pass the address
1897 as if it were an extra parameter. */
1898 if (structure_value_addr && struct_value_rtx == 0)
1900 /* If structure_value_addr is a REG other than
1901 virtual_outgoing_args_rtx, we can always use it. If it
1902 is not a REG, we must always copy it into a register.
1903 If it is virtual_outgoing_args_rtx, we must copy it to another
1904 register in some cases. */
1905 rtx temp = (GET_CODE (structure_value_addr) != REG
1906 #ifdef ACCUMULATE_OUTGOING_ARGS
1907 || (stack_arg_under_construction
1908 && structure_value_addr == virtual_outgoing_args_rtx)
1909 #endif
1910 ? copy_addr_to_reg (structure_value_addr)
1911 : structure_value_addr);
1913 actparms
1914 = tree_cons (error_mark_node,
1915 make_tree (build_pointer_type (TREE_TYPE (funtype)),
1916 temp),
1917 actparms);
1918 structure_value_addr_parm = 1;
1921 /* Count the arguments and set NUM_ACTUALS. */
1922 for (p = actparms, i = 0; p; p = TREE_CHAIN (p)) i++;
1923 num_actuals = i;
1925 /* Compute number of named args.
1926 Normally, don't include the last named arg if anonymous args follow.
1927 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
1928 (If no anonymous args follow, the result of list_length is actually
1929 one too large. This is harmless.)
1931 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
1932 zero, this machine will be able to place unnamed args that were passed in
1933 registers into the stack. So treat all args as named. This allows the
1934 insns emitted for a specific argument list to be independent of the
1935 function declaration.
1937 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any reliable
1938 way to pass unnamed args in registers, so we must force them into
1939 memory. */
1941 if ((STRICT_ARGUMENT_NAMING
1942 || ! PRETEND_OUTGOING_VARARGS_NAMED)
1943 && TYPE_ARG_TYPES (funtype) != 0)
1944 n_named_args
1945 = (list_length (TYPE_ARG_TYPES (funtype))
1946 /* Don't include the last named arg. */
1947 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
1948 /* Count the struct value address, if it is passed as a parm. */
1949 + structure_value_addr_parm);
1950 else
1951 /* If we know nothing, treat all args as named. */
1952 n_named_args = num_actuals;
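/* Worked example: for a prototype such as `int f (int, int, ...)' the
   TYPE_ARG_TYPES list has length 2, so with STRICT_ARGUMENT_NAMING zero only
   the first argument is treated as named.  For `int f (int, int)' the
   terminating void entry makes list_length return 3, and the subtraction
   above yields the correct count of 2.  */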
1954 /* Make a vector to hold all the information about each arg. */
1955 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
1956 bzero ((char *) args, num_actuals * sizeof (struct arg_data));
1958 /* Build up entries in the ARGS array, compute the size of the arguments
1959 into ARGS_SIZE, etc. */
1960 initialize_argument_information (num_actuals, args, &args_size, n_named_args,
1961 actparms, fndecl, &args_so_far,
1962 reg_parm_stack_space, &old_stack_level,
1963 &old_pending_adj, &must_preallocate,
1964 &is_const);
1966 #ifdef FINAL_REG_PARM_STACK_SPACE
1967 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
1968 args_size.var);
1969 #endif
1971 if (args_size.var)
1973 /* If this function requires a variable-sized argument list, don't try to
1974 make a cse'able block for this call. We may be able to do this
1975 eventually, but it is too complicated to keep track of what insns go
1976 in the cse'able block and which don't. */
1978 is_const = 0;
1979 must_preallocate = 1;
1982 /* Compute the actual size of the argument block required. The variable
1983 and constant sizes must be combined, the size may have to be rounded,
1984 and there may be a minimum required size. */
1985 unadjusted_args_size
1986 = compute_argument_block_size (reg_parm_stack_space, &args_size);
1988 /* Now make final decision about preallocating stack space. */
1989 must_preallocate = finalize_must_preallocate (must_preallocate,
1990 num_actuals, args, &args_size);
1992 /* If the structure value address will reference the stack pointer, we must
1993 stabilize it. We don't need to do this if we know that we are not going
1994 to adjust the stack pointer in processing this call. */
1996 if (structure_value_addr
1997 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
1998 || reg_mentioned_p (virtual_outgoing_args_rtx, structure_value_addr))
1999 && (args_size.var
2000 #ifndef ACCUMULATE_OUTGOING_ARGS
2001 || args_size.constant
2002 #endif
2004 structure_value_addr = copy_to_reg (structure_value_addr);
2006 /* Precompute any arguments as needed. */
2007 precompute_arguments (is_const, must_preallocate, num_actuals,
2008 args, &args_size);
2010 /* Now we are about to start emitting insns that can be deleted
2011 if a libcall is deleted. */
2012 if (is_const || is_malloc)
2013 start_sequence ();
2015 /* If we have no actual push instructions, or shouldn't use them,
2016 make space for all args right now. */
2018 if (args_size.var != 0)
2020 if (old_stack_level == 0)
2022 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2023 old_pending_adj = pending_stack_adjust;
2024 pending_stack_adjust = 0;
2025 #ifdef ACCUMULATE_OUTGOING_ARGS
2026 /* stack_arg_under_construction says whether a stack arg is
2027 being constructed at the old stack level. Pushing the stack
2028 gets a clean outgoing argument block. */
2029 old_stack_arg_under_construction = stack_arg_under_construction;
2030 stack_arg_under_construction = 0;
2031 #endif
2033 argblock = push_block (ARGS_SIZE_RTX (args_size), 0, 0);
2035 else
2037 /* Note that we must go through the motions of allocating an argument
2038 block even if the size is zero because we may be storing args
2039 in the area reserved for register arguments, which may be part of
2040 the stack frame. */
2042 int needed = args_size.constant;
2044 /* Store the maximum argument space used. It will be pushed by
2045 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2046 checking). */
2048 if (needed > current_function_outgoing_args_size)
2049 current_function_outgoing_args_size = needed;
2051 if (must_preallocate)
2053 #ifdef ACCUMULATE_OUTGOING_ARGS
2054 /* Since the stack pointer will never be pushed, it is possible for
2055 the evaluation of a parm to clobber something we have already
2056 written to the stack. Since most function calls on RISC machines
2057 do not use the stack, this is uncommon, but must work correctly.
2059 Therefore, we save any area of the stack that was already written
2060 and that we are using. Here we set up to do this by making a new
2061 stack usage map from the old one. The actual save will be done
2062 by store_one_arg.
2064 Another approach might be to try to reorder the argument
2065 evaluations to avoid this conflicting stack usage. */
2067 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2068 /* Since we will be writing into the entire argument area, the
2069 map must be allocated for its entire size, not just the part that
2070 is the responsibility of the caller. */
2071 needed += reg_parm_stack_space;
2072 #endif
2074 #ifdef ARGS_GROW_DOWNWARD
2075 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2076 needed + 1);
2077 #else
2078 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2079 needed);
2080 #endif
2081 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2083 if (initial_highest_arg_in_use)
2084 bcopy (initial_stack_usage_map, stack_usage_map,
2085 initial_highest_arg_in_use);
2087 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2088 bzero (&stack_usage_map[initial_highest_arg_in_use],
2089 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
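/* From here on, each byte of STACK_USAGE_MAP mirrors one byte of the
   outgoing argument area; a nonzero entry means that byte already holds
   live data, which store_one_arg must save before overwriting it.  */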
2090 needed = 0;
2092 /* The address of the outgoing argument list must not be copied to a
2093 register here, because argblock would be left pointing to the
2094 wrong place after the call to allocate_dynamic_stack_space below.
2095 */
2097 argblock = virtual_outgoing_args_rtx;
2099 #else /* not ACCUMULATE_OUTGOING_ARGS */
2100 if (inhibit_defer_pop == 0)
2102 /* Try to reuse some or all of the pending_stack_adjust
2103 to get this space. Maybe we can avoid any pushing. */
2104 if (needed > pending_stack_adjust)
2106 needed -= pending_stack_adjust;
2107 pending_stack_adjust = 0;
2109 else
2111 pending_stack_adjust -= needed;
2112 needed = 0;
2115 /* Special case this because overhead of `push_block' in this
2116 case is non-trivial. */
2117 if (needed == 0)
2118 argblock = virtual_outgoing_args_rtx;
2119 else
2120 argblock = push_block (GEN_INT (needed), 0, 0);
2122 /* We only really need to call `copy_to_reg' in the case where push
2123 insns are going to be used to pass ARGBLOCK to a function
2124 call in ARGS. In that case, the stack pointer changes value
2125 from the allocation point to the call point, and hence
2126 the value of VIRTUAL_OUTGOING_ARGS_RTX changes as well.
2127 But might as well always do it. */
2128 argblock = copy_to_reg (argblock);
2129 #endif /* not ACCUMULATE_OUTGOING_ARGS */
2133 #ifdef ACCUMULATE_OUTGOING_ARGS
2134 /* The save/restore code in store_one_arg handles all cases except one:
2135 a constructor call (including a C function returning a BLKmode struct)
2136 to initialize an argument. */
2137 if (stack_arg_under_construction)
2139 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2140 rtx push_size = GEN_INT (reg_parm_stack_space + args_size.constant);
2141 #else
2142 rtx push_size = GEN_INT (args_size.constant);
2143 #endif
2144 if (old_stack_level == 0)
2146 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2147 old_pending_adj = pending_stack_adjust;
2148 pending_stack_adjust = 0;
2149 /* stack_arg_under_construction says whether a stack arg is
2150 being constructed at the old stack level. Pushing the stack
2151 gets a clean outgoing argument block. */
2152 old_stack_arg_under_construction = stack_arg_under_construction;
2153 stack_arg_under_construction = 0;
2154 /* Make a new map for the new argument list. */
2155 stack_usage_map = (char *)alloca (highest_outgoing_arg_in_use);
2156 bzero (stack_usage_map, highest_outgoing_arg_in_use);
2157 highest_outgoing_arg_in_use = 0;
2159 allocate_dynamic_stack_space (push_size, NULL_RTX, BITS_PER_UNIT);
2161 /* If argument evaluation might modify the stack pointer, copy the
2162 address of the argument list to a register. */
2163 for (i = 0; i < num_actuals; i++)
2164 if (args[i].pass_on_stack)
2166 argblock = copy_addr_to_reg (argblock);
2167 break;
2169 #endif
2171 compute_argument_addresses (args, argblock, num_actuals);
2173 #ifdef PUSH_ARGS_REVERSED
2174 #ifdef PREFERRED_STACK_BOUNDARY
2175 /* If we push args individually in reverse order, perform stack alignment
2176 before the first push (the last arg). */
2177 if (argblock == 0)
2178 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2179 #endif
2180 #endif
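/* For instance, with STACK_BYTES of 16 and an unadjusted size of 20 bytes,
   args_size.constant has been rounded up to 32, so the adjustment above
   pushes the 12 bytes of padding first and the individually pushed args then
   land at their aligned offsets.  */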
2182 /* Don't try to defer pops if preallocating, not even from the first arg,
2183 since ARGBLOCK probably refers to the SP. */
2184 if (argblock)
2185 NO_DEFER_POP;
2187 funexp = rtx_for_function_call (fndecl, exp);
2189 /* Figure out the register where the value, if any, will come back. */
2190 valreg = 0;
2191 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2192 && ! structure_value_addr)
2194 if (pcc_struct_value)
2195 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2196 fndecl);
2197 else
2198 valreg = hard_function_value (TREE_TYPE (exp), fndecl);
2201 /* Precompute all register parameters. It isn't safe to compute anything
2202 once we have started filling any specific hard regs. */
2203 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2205 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2207 /* Save the fixed argument area if it's part of the caller's frame and
2208 is clobbered by argument setup for this call. */
2209 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2210 &low_to_save, &high_to_save);
2211 #endif
2214 /* Now store (and compute if necessary) all non-register parms.
2215 These come before register parms, since they can require block-moves,
2216 which could clobber the registers used for register parms.
2217 Parms which have partial registers are not stored here,
2218 but we do preallocate space here if they want that. */
2220 for (i = 0; i < num_actuals; i++)
2221 if (args[i].reg == 0 || args[i].pass_on_stack)
2222 store_one_arg (&args[i], argblock, may_be_alloca,
2223 args_size.var != 0, reg_parm_stack_space);
2225 /* If we have a parm that is passed in registers but not in memory
2226 and whose alignment does not permit a direct copy into registers,
2227 make a group of pseudos that correspond to each register that we
2228 will later fill. */
2229 if (STRICT_ALIGNMENT)
2230 store_unaligned_arguments_into_pseudos (args, num_actuals);
2232 /* Now store any partially-in-registers parm.
2233 This is the last place a block-move can happen. */
2234 if (reg_parm_seen)
2235 for (i = 0; i < num_actuals; i++)
2236 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2237 store_one_arg (&args[i], argblock, may_be_alloca,
2238 args_size.var != 0, reg_parm_stack_space);
2240 #ifndef PUSH_ARGS_REVERSED
2241 #ifdef PREFERRED_STACK_BOUNDARY
2242 /* If we pushed args in forward order, perform stack alignment
2243 after pushing the last arg. */
2244 if (argblock == 0)
2245 anti_adjust_stack (GEN_INT (args_size.constant - unadjusted_args_size));
2246 #endif
2247 #endif
2249 /* If register arguments require space on the stack and stack space
2250 was not preallocated, allocate stack space here for arguments
2251 passed in registers. */
2252 #if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
2253 if (must_preallocate == 0 && reg_parm_stack_space > 0)
2254 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2255 #endif
2257 /* Pass the function the address in which to return a structure value. */
2258 if (structure_value_addr && ! structure_value_addr_parm)
2260 emit_move_insn (struct_value_rtx,
2261 force_reg (Pmode,
2262 force_operand (structure_value_addr,
2263 NULL_RTX)));
2265 /* Mark the memory for the aggregate as write-only. */
2266 if (current_function_check_memory_usage)
2267 emit_library_call (chkr_set_right_libfunc, 1,
2268 VOIDmode, 3,
2269 structure_value_addr, Pmode,
2270 GEN_INT (struct_value_size), TYPE_MODE (sizetype),
2271 GEN_INT (MEMORY_USE_WO),
2272 TYPE_MODE (integer_type_node));
2274 if (GET_CODE (struct_value_rtx) == REG)
2275 use_reg (&call_fusage, struct_value_rtx);
2278 funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);
2280 load_register_parameters (args, num_actuals, &call_fusage);
2282 /* Perform postincrements before actually calling the function. */
2283 emit_queue ();
2285 /* All arguments and registers used for the call must be set up by now! */
2287 /* Generate the actual call instruction. */
2288 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
2289 args_size.constant, struct_value_size,
2290 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2291 valreg, old_inhibit_defer_pop, call_fusage, is_const);
2293 /* If call is cse'able, make appropriate pair of reg-notes around it.
2294 Test valreg so we don't crash; may safely ignore `const'
2295 if return type is void. Disable for PARALLEL return values, because
2296 we have no way to move such values into a pseudo register. */
2297 if (is_const && valreg != 0 && GET_CODE (valreg) != PARALLEL)
2299 rtx note = 0;
2300 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2301 rtx insns;
2303 /* Mark the return value as a pointer if needed. */
2304 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2306 tree pointed_to = TREE_TYPE (TREE_TYPE (exp));
2307 mark_reg_pointer (temp, TYPE_ALIGN (pointed_to) / BITS_PER_UNIT);
2310 /* Construct an "equal form" for the value which mentions all the
2311 arguments in order as well as the function name. */
2312 #ifdef PUSH_ARGS_REVERSED
2313 for (i = 0; i < num_actuals; i++)
2314 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2315 #else
2316 for (i = num_actuals - 1; i >= 0; i--)
2317 note = gen_rtx_EXPR_LIST (VOIDmode, args[i].initial_value, note);
2318 #endif
2319 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2321 insns = get_insns ();
2322 end_sequence ();
2324 emit_libcall_block (insns, temp, valreg, note);
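/* emit_libcall_block brackets the sequence just built with REG_LIBCALL and
   REG_RETVAL notes and attaches NOTE as a REG_EQUAL note on the insn that
   copies VALREG into TEMP; later passes use that note to recognize and
   delete redundant calls to this const function.  */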
2326 valreg = temp;
2328 else if (is_const)
2330 /* Otherwise, just write out the sequence without a note. */
2331 rtx insns = get_insns ();
2333 end_sequence ();
2334 emit_insns (insns);
2336 else if (is_malloc)
2338 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2339 rtx last, insns;
2341 /* The return value from a malloc-like function is a pointer. */
2342 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2343 mark_reg_pointer (temp, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2345 emit_move_insn (temp, valreg);
2347 /* The return value from a malloc-like function can not alias
2348 anything else. */
2349 last = get_last_insn ();
2350 REG_NOTES (last) =
2351 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
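/* Alias analysis reads the REG_NOALIAS note as a promise that the pointer
   in TEMP addresses freshly allocated storage, so references through it
   can be disambiguated from every pre-existing object.  */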
2353 /* Write out the sequence. */
2354 insns = get_insns ();
2355 end_sequence ();
2356 emit_insns (insns);
2357 valreg = temp;
2360 /* For calls to `setjmp', etc., inform flow.c it should complain
2361 if nonvolatile values are live. */
2363 if (returns_twice)
2365 emit_note (name, NOTE_INSN_SETJMP);
2366 current_function_calls_setjmp = 1;
2369 if (is_longjmp)
2370 current_function_calls_longjmp = 1;
2372 /* Notice functions that cannot return.
2373 If optimizing, insns emitted below will be dead.
2374 If not optimizing, they will exist, which is useful
2375 if the user uses the `return' command in the debugger. */
2377 if (is_volatile || is_longjmp)
2378 emit_barrier ();
2380 /* If value type not void, return an rtx for the value. */
2382 /* If there are cleanups to be called, don't use a hard reg as target.
2383 We need to double check this and see if it matters anymore. */
2384 if (any_pending_cleanups (1)
2385 && target && REG_P (target)
2386 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2387 target = 0;
2389 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2390 || ignore)
2392 target = const0_rtx;
2394 else if (structure_value_addr)
2396 if (target == 0 || GET_CODE (target) != MEM)
2398 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2399 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2400 structure_value_addr));
2401 MEM_SET_IN_STRUCT_P (target,
2402 AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2405 else if (pcc_struct_value)
2407 /* This is the special C++ case where we need to
2408 know what the true target was. We take care to
2409 never use this value more than once in one expression. */
2410 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2411 copy_to_reg (valreg));
2412 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2414 /* Handle calls that return values in multiple non-contiguous locations.
2415 The Irix 6 ABI has examples of this. */
2416 else if (GET_CODE (valreg) == PARALLEL)
2418 int bytes = int_size_in_bytes (TREE_TYPE (exp));
2420 if (target == 0)
2422 target = assign_stack_temp (TYPE_MODE (TREE_TYPE (exp)), bytes, 0);
2423 MEM_SET_IN_STRUCT_P (target, AGGREGATE_TYPE_P (TREE_TYPE (exp)));
2424 preserve_temp_slots (target);
2427 emit_group_store (target, valreg, bytes,
2428 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2430 else if (target && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2431 && GET_MODE (target) == GET_MODE (valreg))
2432 /* TARGET and VALREG cannot be equal at this point because the latter
2433 would not have REG_FUNCTION_VALUE_P true, while the former would if
2434 it were referring to the same register.
2436 If they refer to the same register, this move will be a no-op, except
2437 when function inlining is being done. */
2438 emit_move_insn (target, valreg);
2439 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2440 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2441 else
2442 target = copy_to_reg (valreg);
2444 #ifdef PROMOTE_FUNCTION_RETURN
2445 /* If we promoted this return value, make the proper SUBREG. TARGET
2446 might be const0_rtx here, so be careful. */
2447 if (GET_CODE (target) == REG
2448 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2449 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2451 tree type = TREE_TYPE (exp);
2452 int unsignedp = TREE_UNSIGNED (type);
2454 /* If we don't promote as expected, something is wrong. */
2455 if (GET_MODE (target)
2456 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
2457 abort ();
2459 target = gen_rtx_SUBREG (TYPE_MODE (type), target, 0);
2460 SUBREG_PROMOTED_VAR_P (target) = 1;
2461 SUBREG_PROMOTED_UNSIGNED_P (target) = unsignedp;
2463 #endif
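/* For example, a `short' return value on a machine that promotes function
   return values comes back widened to SImode; the SUBREG built above
   presents it in HImode again while SUBREG_PROMOTED_VAR_P records that the
   underlying register already holds a properly extended value.  */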
2465 /* If size of args is variable or this was a constructor call for a stack
2466 argument, restore saved stack-pointer value. */
2468 if (old_stack_level)
2470 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2471 pending_stack_adjust = old_pending_adj;
2472 #ifdef ACCUMULATE_OUTGOING_ARGS
2473 stack_arg_under_construction = old_stack_arg_under_construction;
2474 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2475 stack_usage_map = initial_stack_usage_map;
2476 #endif
2478 #ifdef ACCUMULATE_OUTGOING_ARGS
2479 else
2481 #ifdef REG_PARM_STACK_SPACE
2482 if (save_area)
2483 restore_fixed_argument_area (save_area, argblock,
2484 high_to_save, low_to_save);
2485 #endif
2487 /* If we saved any argument areas, restore them. */
2488 for (i = 0; i < num_actuals; i++)
2489 if (args[i].save_area)
2491 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2492 rtx stack_area
2493 = gen_rtx_MEM (save_mode,
2494 memory_address (save_mode,
2495 XEXP (args[i].stack_slot, 0)));
2497 if (save_mode != BLKmode)
2498 emit_move_insn (stack_area, args[i].save_area);
2499 else
2500 emit_block_move (stack_area, validize_mem (args[i].save_area),
2501 GEN_INT (args[i].size.constant),
2502 PARM_BOUNDARY / BITS_PER_UNIT);
2505 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2506 stack_usage_map = initial_stack_usage_map;
2508 #endif
2510 /* If this was alloca, record the new stack level for nonlocal gotos.
2511 Check for the handler slots since we might not have a save area
2512 for non-local gotos. */
2514 if (may_be_alloca && nonlocal_goto_handler_slots != 0)
2515 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
2517 pop_temp_slots ();
2519 /* Free up storage we no longer need. */
2520 for (i = 0; i < num_actuals; ++i)
2521 if (args[i].aligned_regs)
2522 free (args[i].aligned_regs);
2524 return target;
2527 /* Output a library call to function FUN (a SYMBOL_REF rtx)
2528 (emitting the queue unless NO_QUEUE is nonzero),
2529 for a value of mode OUTMODE,
2530 with NARGS different arguments, passed as alternating rtx values
2531 and machine_modes to convert them to.
2532 The rtx values should have been passed through protect_from_queue already.
2534 NO_QUEUE will be true if and only if the library call is a `const' call
2535 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2536 to the variable is_const in expand_call.
2538 NO_QUEUE must be true for const calls, because if it isn't, then
2539 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2540 and will be lost if the libcall sequence is optimized away.
2542 NO_QUEUE must be false for non-const calls, because if it isn't, the
2543 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2544 optimized. For instance, the instruction scheduler may incorrectly
2545 move memory references across the non-const call. */
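/* An illustrative call, modeled on the chkr_set_right_libfunc use in
   expand_call above; ADDR and SIZE stand for whatever rtx values the caller
   has in hand, and each argument rtx is followed by the machine mode it is
   passed in:

     emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                        addr, Pmode,
                        size, TYPE_MODE (sizetype),
                        GEN_INT (MEMORY_USE_WO), TYPE_MODE (integer_type_node));
*/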
2547 void
2548 emit_library_call VPROTO((rtx orgfun, int no_queue, enum machine_mode outmode,
2549 int nargs, ...))
2551 #ifndef ANSI_PROTOTYPES
2552 rtx orgfun;
2553 int no_queue;
2554 enum machine_mode outmode;
2555 int nargs;
2556 #endif
2557 va_list p;
2558 /* Total size in bytes of all the stack-parms scanned so far. */
2559 struct args_size args_size;
2560 /* Size of arguments before any adjustments (such as rounding). */
2561 struct args_size original_args_size;
2562 register int argnum;
2563 rtx fun;
2564 int inc;
2565 int count;
2566 rtx argblock = 0;
2567 CUMULATIVE_ARGS args_so_far;
2568 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2569 struct args_size offset; struct args_size size; rtx save_area; };
2570 struct arg *argvec;
2571 int old_inhibit_defer_pop = inhibit_defer_pop;
2572 rtx call_fusage = 0;
2573 int reg_parm_stack_space = 0;
2574 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2575 /* Define the boundary of the register parm stack space that needs to be
2576 saved, if any. */
2577 int low_to_save = -1, high_to_save;
2578 rtx save_area = 0; /* Place that it is saved */
2579 #endif
2581 #ifdef ACCUMULATE_OUTGOING_ARGS
2582 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2583 char *initial_stack_usage_map = stack_usage_map;
2584 int needed;
2585 #endif
2587 #ifdef REG_PARM_STACK_SPACE
2588 /* Size of the stack reserved for parameter registers. */
2589 #ifdef MAYBE_REG_PARM_STACK_SPACE
2590 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2591 #else
2592 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
2593 #endif
2594 #endif
2596 VA_START (p, nargs);
2598 #ifndef ANSI_PROTOTYPES
2599 orgfun = va_arg (p, rtx);
2600 no_queue = va_arg (p, int);
2601 outmode = va_arg (p, enum machine_mode);
2602 nargs = va_arg (p, int);
2603 #endif
2605 fun = orgfun;
2607 /* Copy all the libcall-arguments out of the varargs data
2608 and into a vector ARGVEC.
2610 Compute how to pass each argument. We only support a very small subset
2611 of the full argument passing conventions to limit complexity here since
2612 library functions shouldn't have many args. */
2614 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2615 bzero ((char *) argvec, nargs * sizeof (struct arg));
2618 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
2620 args_size.constant = 0;
2621 args_size.var = 0;
2623 push_temp_slots ();
2625 for (count = 0; count < nargs; count++)
2627 rtx val = va_arg (p, rtx);
2628 enum machine_mode mode = va_arg (p, enum machine_mode);
2630 /* We cannot convert the arg value to the mode the library wants here;
2631 must do it earlier where we know the signedness of the arg. */
2632 if (mode == BLKmode
2633 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2634 abort ();
2636 /* On some machines, there's no way to pass a float to a library fcn.
2637 Pass it as a double instead. */
2638 #ifdef LIBGCC_NEEDS_DOUBLE
2639 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2640 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
2641 #endif
2643 /* There's no need to call protect_from_queue, because
2644 either emit_move_insn or emit_push_insn will do that. */
2646 /* Make sure it is a reasonable operand for a move or push insn. */
2647 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2648 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2649 val = force_operand (val, NULL_RTX);
2651 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2652 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2654 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
2655 be viewed as just an efficiency improvement. */
2656 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
2657 emit_move_insn (slot, val);
2658 val = force_operand (XEXP (slot, 0), NULL_RTX);
2659 mode = Pmode;
2661 #endif
2663 argvec[count].value = val;
2664 argvec[count].mode = mode;
2666 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2667 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
2668 abort ();
2669 #ifdef FUNCTION_ARG_PARTIAL_NREGS
2670 argvec[count].partial
2671 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2672 #else
2673 argvec[count].partial = 0;
2674 #endif
2676 locate_and_pad_parm (mode, NULL_TREE,
2677 argvec[count].reg && argvec[count].partial == 0,
2678 NULL_TREE, &args_size, &argvec[count].offset,
2679 &argvec[count].size);
2681 if (argvec[count].size.var)
2682 abort ();
2684 if (reg_parm_stack_space == 0 && argvec[count].partial)
2685 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2687 if (argvec[count].reg == 0 || argvec[count].partial != 0
2688 || reg_parm_stack_space > 0)
2689 args_size.constant += argvec[count].size.constant;
2691 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
2693 va_end (p);
2695 #ifdef FINAL_REG_PARM_STACK_SPACE
2696 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2697 args_size.var);
2698 #endif
2700 /* If this machine requires an external definition for library
2701 functions, write one out. */
2702 assemble_external_libcall (fun);
2704 original_args_size = args_size;
2705 #ifdef PREFERRED_STACK_BOUNDARY
2706 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2707 / STACK_BYTES) * STACK_BYTES);
2708 #endif
2710 args_size.constant = MAX (args_size.constant,
2711 reg_parm_stack_space);
2713 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2714 args_size.constant -= reg_parm_stack_space;
2715 #endif
2717 if (args_size.constant > current_function_outgoing_args_size)
2718 current_function_outgoing_args_size = args_size.constant;
2720 #ifdef ACCUMULATE_OUTGOING_ARGS
2721 /* Since the stack pointer will never be pushed, it is possible for
2722 the evaluation of a parm to clobber something we have already
2723 written to the stack. Since most function calls on RISC machines
2724 do not use the stack, this is uncommon, but must work correctly.
2726 Therefore, we save any area of the stack that was already written
2727 and that we are using. Here we set up to do this by making a new
2728 stack usage map from the old one.
2730 Another approach might be to try to reorder the argument
2731 evaluations to avoid this conflicting stack usage. */
2733 needed = args_size.constant;
2735 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2736 /* Since we will be writing into the entire argument area, the
2737 map must be allocated for its entire size, not just the part that
2738 is the responsibility of the caller. */
2739 needed += reg_parm_stack_space;
2740 #endif
2742 #ifdef ARGS_GROW_DOWNWARD
2743 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2744 needed + 1);
2745 #else
2746 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2747 needed);
2748 #endif
2749 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
2751 if (initial_highest_arg_in_use)
2752 bcopy (initial_stack_usage_map, stack_usage_map,
2753 initial_highest_arg_in_use);
2755 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2756 bzero (&stack_usage_map[initial_highest_arg_in_use],
2757 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
2758 needed = 0;
2760 /* The address of the outgoing argument list must not be copied to a
2761 register here, because argblock would be left pointing to the
2762 wrong place after the call to allocate_dynamic_stack_space below.
2763 */
2765 argblock = virtual_outgoing_args_rtx;
2766 #else /* not ACCUMULATE_OUTGOING_ARGS */
2767 #ifndef PUSH_ROUNDING
2768 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2769 #endif
2770 #endif
2772 #ifdef PUSH_ARGS_REVERSED
2773 #ifdef PREFERRED_STACK_BOUNDARY
2774 /* If we push args individually in reverse order, perform stack alignment
2775 before the first push (the last arg). */
2776 if (argblock == 0)
2777 anti_adjust_stack (GEN_INT (args_size.constant
2778 - original_args_size.constant));
2779 #endif
2780 #endif
2782 #ifdef PUSH_ARGS_REVERSED
2783 inc = -1;
2784 argnum = nargs - 1;
2785 #else
2786 inc = 1;
2787 argnum = 0;
2788 #endif
2790 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
2791 /* The argument list is the property of the called routine and it
2792 may clobber it. If the fixed area has been used for previous
2793 parameters, we must save and restore it.
2795 Here we compute the boundary of the area that needs to be saved, if any. */
2797 #ifdef ARGS_GROW_DOWNWARD
2798 for (count = 0; count < reg_parm_stack_space + 1; count++)
2799 #else
2800 for (count = 0; count < reg_parm_stack_space; count++)
2801 #endif
2803 if (count >= highest_outgoing_arg_in_use
2804 || stack_usage_map[count] == 0)
2805 continue;
2807 if (low_to_save == -1)
2808 low_to_save = count;
2810 high_to_save = count;
2813 if (low_to_save >= 0)
2815 int num_to_save = high_to_save - low_to_save + 1;
2816 enum machine_mode save_mode
2817 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
2818 rtx stack_area;
2820 /* If we don't have the required alignment, must do this in BLKmode. */
2821 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
2822 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
2823 save_mode = BLKmode;
2825 #ifdef ARGS_GROW_DOWNWARD
2826 stack_area = gen_rtx_MEM (save_mode,
2827 memory_address (save_mode,
2828 plus_constant (argblock,
2829 - high_to_save)));
2830 #else
2831 stack_area = gen_rtx_MEM (save_mode,
2832 memory_address (save_mode,
2833 plus_constant (argblock,
2834 low_to_save)));
2835 #endif
2836 if (save_mode == BLKmode)
2838 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
2839 emit_block_move (validize_mem (save_area), stack_area,
2840 GEN_INT (num_to_save),
2841 PARM_BOUNDARY / BITS_PER_UNIT);
2843 else
2845 save_area = gen_reg_rtx (save_mode);
2846 emit_move_insn (save_area, stack_area);
2849 #endif
2851 /* Push the args that need to be pushed. */
2853 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2854 are to be pushed. */
2855 for (count = 0; count < nargs; count++, argnum += inc)
2857 register enum machine_mode mode = argvec[argnum].mode;
2858 register rtx val = argvec[argnum].value;
2859 rtx reg = argvec[argnum].reg;
2860 int partial = argvec[argnum].partial;
2861 #ifdef ACCUMULATE_OUTGOING_ARGS
2862 int lower_bound, upper_bound, i;
2863 #endif
2865 if (! (reg != 0 && partial == 0))
2867 #ifdef ACCUMULATE_OUTGOING_ARGS
2868 /* If this is being stored into a pre-allocated, fixed-size, stack
2869 area, save any previous data at that location. */
2871 #ifdef ARGS_GROW_DOWNWARD
2872 /* stack_slot is negative, but we want to index stack_usage_map
2873 with positive values. */
2874 upper_bound = -argvec[argnum].offset.constant + 1;
2875 lower_bound = upper_bound - argvec[argnum].size.constant;
2876 #else
2877 lower_bound = argvec[argnum].offset.constant;
2878 upper_bound = lower_bound + argvec[argnum].size.constant;
2879 #endif
2881 for (i = lower_bound; i < upper_bound; i++)
2882 if (stack_usage_map[i]
2883 /* Don't store things in the fixed argument area at this point;
2884 it has already been saved. */
2885 && i > reg_parm_stack_space)
2886 break;
2888 if (i != upper_bound)
2890 /* We need to make a save area. See what mode we can make it. */
2891 enum machine_mode save_mode
2892 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
2893 MODE_INT, 1);
2894 rtx stack_area
2895 = gen_rtx_MEM (save_mode,
2896 memory_address (save_mode,
2897 plus_constant (argblock, argvec[argnum].offset.constant)));
2898 argvec[argnum].save_area = gen_reg_rtx (save_mode);
2899 emit_move_insn (argvec[argnum].save_area, stack_area);
2901 #endif
2902 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2903 argblock, GEN_INT (argvec[argnum].offset.constant),
2904 reg_parm_stack_space);
2906 #ifdef ACCUMULATE_OUTGOING_ARGS
2907 /* Now mark the segment we just used. */
2908 for (i = lower_bound; i < upper_bound; i++)
2909 stack_usage_map[i] = 1;
2910 #endif
2912 NO_DEFER_POP;
2916 #ifndef PUSH_ARGS_REVERSED
2917 #ifdef PREFERRED_STACK_BOUNDARY
2918 /* If we pushed args in forward order, perform stack alignment
2919 after pushing the last arg. */
2920 if (argblock == 0)
2921 anti_adjust_stack (GEN_INT (args_size.constant
2922 - original_args_size.constant));
2923 #endif
2924 #endif
2926 #ifdef PUSH_ARGS_REVERSED
2927 argnum = nargs - 1;
2928 #else
2929 argnum = 0;
2930 #endif
2932 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
2934 /* Now load any reg parms into their regs. */
2936 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
2937 are to be pushed. */
2938 for (count = 0; count < nargs; count++, argnum += inc)
2940 register rtx val = argvec[argnum].value;
2941 rtx reg = argvec[argnum].reg;
2942 int partial = argvec[argnum].partial;
2944 if (reg != 0 && partial == 0)
2945 emit_move_insn (reg, val);
2946 NO_DEFER_POP;
2949 /* For version 1.37, try deleting this entirely. */
2950 if (! no_queue)
2951 emit_queue ();
2953 /* Any regs containing parms remain in use through the call. */
2954 for (count = 0; count < nargs; count++)
2955 if (argvec[count].reg != 0)
2956 use_reg (&call_fusage, argvec[count].reg);
2958 /* Don't allow popping to be deferred, since then
2959 cse'ing of library calls could delete a call and leave the pop. */
2960 NO_DEFER_POP;
2962 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2963 will set inhibit_defer_pop to that value. */
2965 /* The return type is needed to decide how many bytes the function pops.
2966 Signedness plays no role in that, so for simplicity, we pretend it's
2967 always signed. We also assume that the list of arguments passed has
2968 no impact, so we pretend it is unknown. */
2970 emit_call_1 (fun,
2971 get_identifier (XSTR (orgfun, 0)),
2972 build_function_type (outmode == VOIDmode ? void_type_node
2973 : type_for_mode (outmode, 0), NULL_TREE),
2974 original_args_size.constant, args_size.constant, 0,
2975 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2976 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2977 old_inhibit_defer_pop + 1, call_fusage, no_queue);
2979 pop_temp_slots ();
2981 /* Now restore inhibit_defer_pop to its actual original value. */
2982 OK_DEFER_POP;
2984 #ifdef ACCUMULATE_OUTGOING_ARGS
2985 #ifdef REG_PARM_STACK_SPACE
2986 if (save_area)
2988 enum machine_mode save_mode = GET_MODE (save_area);
2989 #ifdef ARGS_GROW_DOWNWARD
2990 rtx stack_area
2991 = gen_rtx_MEM (save_mode,
2992 memory_address (save_mode,
2993 plus_constant (argblock,
2994 - high_to_save)));
2995 #else
2996 rtx stack_area
2997 = gen_rtx_MEM (save_mode,
2998 memory_address (save_mode,
2999 plus_constant (argblock, low_to_save)));
3000 #endif
3002 if (save_mode != BLKmode)
3003 emit_move_insn (stack_area, save_area);
3004 else
3005 emit_block_move (stack_area, validize_mem (save_area),
3006 GEN_INT (high_to_save - low_to_save + 1),
3007 PARM_BOUNDARY / BITS_PER_UNIT);
3009 #endif
3011 /* If we saved any argument areas, restore them. */
3012 for (count = 0; count < nargs; count++)
3013 if (argvec[count].save_area)
3015 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3016 rtx stack_area
3017 = gen_rtx_MEM (save_mode,
3018 memory_address (save_mode,
3019 plus_constant (argblock, argvec[count].offset.constant)));
3021 emit_move_insn (stack_area, argvec[count].save_area);
3024 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3025 stack_usage_map = initial_stack_usage_map;
3026 #endif
3029 /* Like emit_library_call except that an extra argument, VALUE,
3030 comes second and says where to store the result.
3031 (If VALUE is zero, this function chooses a convenient way
3032 to return the value.)
3034 This function returns an rtx for where the value is to be found.
3035 If VALUE is nonzero, VALUE is returned. */
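/* An illustrative call with placeholder names (LIBFUNC, OP0, OP1): a
   two-operand SImode routine whose result is placed wherever this function
   finds convenient, since VALUE is passed as NULL_RTX:

     result = emit_library_call_value (libfunc, NULL_RTX, 1, SImode, 2,
                                       op0, SImode, op1, SImode);
*/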
3037 rtx
3038 emit_library_call_value VPROTO((rtx orgfun, rtx value, int no_queue,
3039 enum machine_mode outmode, int nargs, ...))
3041 #ifndef ANSI_PROTOTYPES
3042 rtx orgfun;
3043 rtx value;
3044 int no_queue;
3045 enum machine_mode outmode;
3046 int nargs;
3047 #endif
3048 va_list p;
3049 /* Total size in bytes of all the stack-parms scanned so far. */
3050 struct args_size args_size;
3051 /* Size of arguments before any adjustments (such as rounding). */
3052 struct args_size original_args_size;
3053 register int argnum;
3054 rtx fun;
3055 int inc;
3056 int count;
3057 rtx argblock = 0;
3058 CUMULATIVE_ARGS args_so_far;
3059 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
3060 struct args_size offset; struct args_size size; rtx save_area; };
3061 struct arg *argvec;
3062 int old_inhibit_defer_pop = inhibit_defer_pop;
3063 rtx call_fusage = 0;
3064 rtx mem_value = 0;
3065 int pcc_struct_value = 0;
3066 int struct_value_size = 0;
3067 int is_const;
3068 int reg_parm_stack_space = 0;
3069 #ifdef ACCUMULATE_OUTGOING_ARGS
3070 int needed;
3071 #endif
3073 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3074 /* Define the boundary of the register parm stack space that needs to be
3075 save, if any. */
3076 int low_to_save = -1, high_to_save;
3077 rtx save_area = 0; /* Place that it is saved */
3078 #endif
3080 #ifdef ACCUMULATE_OUTGOING_ARGS
3081 /* Record the initial state of the outgoing argument area, to be restored after the call. */
3082 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3083 char *initial_stack_usage_map = stack_usage_map;
3084 #endif
3086 #ifdef REG_PARM_STACK_SPACE
3087 #ifdef MAYBE_REG_PARM_STACK_SPACE
3088 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3089 #else
3090 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3091 #endif
3092 #endif
3094 VA_START (p, nargs);
3096 #ifndef ANSI_PROTOTYPES
3097 orgfun = va_arg (p, rtx);
3098 value = va_arg (p, rtx);
3099 no_queue = va_arg (p, int);
3100 outmode = va_arg (p, enum machine_mode);
3101 nargs = va_arg (p, int);
3102 #endif
3104 is_const = no_queue;
3105 fun = orgfun;
3107 /* If this kind of value comes back in memory,
3108 decide where in memory it should come back. */
3109 if (aggregate_value_p (type_for_mode (outmode, 0)))
3111 #ifdef PCC_STATIC_STRUCT_RETURN
3112 rtx pointer_reg
3113 = hard_function_value (build_pointer_type (type_for_mode (outmode, 0)),
3114 0);
3115 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3116 pcc_struct_value = 1;
3117 if (value == 0)
3118 value = gen_reg_rtx (outmode);
3119 #else /* not PCC_STATIC_STRUCT_RETURN */
3120 struct_value_size = GET_MODE_SIZE (outmode);
3121 if (value != 0 && GET_CODE (value) == MEM)
3122 mem_value = value;
3123 else
3124 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
3125 #endif
3127 /* This call returns a big structure. */
3128 is_const = 0;
3131 /* ??? Unfinished: must pass the memory address as an argument. */
3133 /* Copy all the libcall-arguments out of the varargs data
3134 and into a vector ARGVEC.
3136 Compute how to pass each argument. We only support a very small subset
3137 of the full argument passing conventions to limit complexity here since
3138 library functions shouldn't have many args. */
3140 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3141 bzero ((char *) argvec, (nargs + 1) * sizeof (struct arg));
3143 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3145 args_size.constant = 0;
3146 args_size.var = 0;
3148 count = 0;
3150 push_temp_slots ();
3152 /* If there's a structure value address to be passed,
3153 either pass it in the special place, or pass it as an extra argument. */
3154 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3156 rtx addr = XEXP (mem_value, 0);
3157 nargs++;
3159 /* Make sure it is a reasonable operand for a move or push insn. */
3160 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3161 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3162 addr = force_operand (addr, NULL_RTX);
3164 argvec[count].value = addr;
3165 argvec[count].mode = Pmode;
3166 argvec[count].partial = 0;
3168 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3169 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3170 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3171 abort ();
3172 #endif
3174 locate_and_pad_parm (Pmode, NULL_TREE,
3175 argvec[count].reg && argvec[count].partial == 0,
3176 NULL_TREE, &args_size, &argvec[count].offset,
3177 &argvec[count].size);
3180 if (argvec[count].reg == 0 || argvec[count].partial != 0
3181 || reg_parm_stack_space > 0)
3182 args_size.constant += argvec[count].size.constant;
3184 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3186 count++;
3189 for (; count < nargs; count++)
3191 rtx val = va_arg (p, rtx);
3192 enum machine_mode mode = va_arg (p, enum machine_mode);
3194 /* We cannot convert the arg value to the mode the library wants here;
3195 must do it earlier where we know the signedness of the arg. */
3196 if (mode == BLKmode
3197 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3198 abort ();
3200 /* On some machines, there's no way to pass a float to a library fcn.
3201 Pass it as a double instead. */
3202 #ifdef LIBGCC_NEEDS_DOUBLE
3203 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3204 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3205 #endif
3207 /* There's no need to call protect_from_queue, because
3208 either emit_move_insn or emit_push_insn will do that. */
3210 /* Make sure it is a reasonable operand for a move or push insn. */
3211 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3212 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3213 val = force_operand (val, NULL_RTX);
3215 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3216 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3218 /* We do not support FUNCTION_ARG_CALLEE_COPIES here since it can
3219 be viewed as just an efficiency improvement. */
3220 rtx slot = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
3221 emit_move_insn (slot, val);
3222 val = XEXP (slot, 0);
3223 mode = Pmode;
3225 #endif
3227 argvec[count].value = val;
3228 argvec[count].mode = mode;
3230 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3231 if (argvec[count].reg && GET_CODE (argvec[count].reg) == PARALLEL)
3232 abort ();
3233 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3234 argvec[count].partial
3235 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3236 #else
3237 argvec[count].partial = 0;
3238 #endif
3240 locate_and_pad_parm (mode, NULL_TREE,
3241 argvec[count].reg && argvec[count].partial == 0,
3242 NULL_TREE, &args_size, &argvec[count].offset,
3243 &argvec[count].size);
3245 if (argvec[count].size.var)
3246 abort ();
3248 if (reg_parm_stack_space == 0 && argvec[count].partial)
3249 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3251 if (argvec[count].reg == 0 || argvec[count].partial != 0
3252 || reg_parm_stack_space > 0)
3253 args_size.constant += argvec[count].size.constant;
3255 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3257 va_end (p);
3259 #ifdef FINAL_REG_PARM_STACK_SPACE
3260 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3261 args_size.var);
3262 #endif
3263 /* If this machine requires an external definition for library
3264 functions, write one out. */
3265 assemble_external_libcall (fun);
3267 original_args_size = args_size;
3268 #ifdef PREFERRED_STACK_BOUNDARY
3269 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
3270 / STACK_BYTES) * STACK_BYTES);
3271 #endif
3273 args_size.constant = MAX (args_size.constant,
3274 reg_parm_stack_space);
3276 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3277 args_size.constant -= reg_parm_stack_space;
3278 #endif
3280 if (args_size.constant > current_function_outgoing_args_size)
3281 current_function_outgoing_args_size = args_size.constant;
3283 #ifdef ACCUMULATE_OUTGOING_ARGS
3284 /* Since the stack pointer will never be pushed, it is possible for
3285 the evaluation of a parm to clobber something we have already
3286 written to the stack. Since most function calls on RISC machines
3287 do not use the stack, this is uncommon, but must work correctly.
3289 Therefore, we save any area of the stack that was already written
3290 and that we are using. Here we set up to do this by making a new
3291 stack usage map from the old one.
3293 Another approach might be to try to reorder the argument
3294 evaluations to avoid this conflicting stack usage. */
3296 needed = args_size.constant;
3298 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3299 /* Since we will be writing into the entire argument area, the
3300 map must be allocated for its entire size, not just the part that
3301 is the responsibility of the caller. */
3302 needed += reg_parm_stack_space;
3303 #endif
3305 #ifdef ARGS_GROW_DOWNWARD
3306 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3307 needed + 1);
3308 #else
3309 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3310 needed);
3311 #endif
3312 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3314 if (initial_highest_arg_in_use)
3315 bcopy (initial_stack_usage_map, stack_usage_map,
3316 initial_highest_arg_in_use);
3318 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3319 bzero (&stack_usage_map[initial_highest_arg_in_use],
3320 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3321 needed = 0;
3323 /* The address of the outgoing argument list must not be copied to a
3324 register here, because argblock would be left pointing to the
3325 wrong place after the call to allocate_dynamic_stack_space below.
3326 */
3328 argblock = virtual_outgoing_args_rtx;
3329 #else /* not ACCUMULATE_OUTGOING_ARGS */
3330 #ifndef PUSH_ROUNDING
3331 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3332 #endif
3333 #endif
3335 #ifdef PUSH_ARGS_REVERSED
3336 #ifdef PREFERRED_STACK_BOUNDARY
3337 /* If we push args individually in reverse order, perform stack alignment
3338 before the first push (the last arg). */
3339 if (argblock == 0)
3340 anti_adjust_stack (GEN_INT (args_size.constant
3341 - original_args_size.constant));
3342 #endif
3343 #endif
3345 #ifdef PUSH_ARGS_REVERSED
3346 inc = -1;
3347 argnum = nargs - 1;
3348 #else
3349 inc = 1;
3350 argnum = 0;
3351 #endif
3353 #if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
3354 /* The argument list is the property of the called routine and it
3355 may clobber it. If the fixed area has been used for previous
3356 parameters, we must save and restore it.
3358 Here we compute the boundary of the area that needs to be saved, if any. */
3360 #ifdef ARGS_GROW_DOWNWARD
3361 for (count = 0; count < reg_parm_stack_space + 1; count++)
3362 #else
3363 for (count = 0; count < reg_parm_stack_space; count++)
3364 #endif
3366 if (count >= highest_outgoing_arg_in_use
3367 || stack_usage_map[count] == 0)
3368 continue;
3370 if (low_to_save == -1)
3371 low_to_save = count;
3373 high_to_save = count;
3376 if (low_to_save >= 0)
3378 int num_to_save = high_to_save - low_to_save + 1;
3379 enum machine_mode save_mode
3380 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3381 rtx stack_area;
3383 /* If we don't have the required alignment, must do this in BLKmode. */
3384 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3385 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3386 save_mode = BLKmode;
3388 #ifdef ARGS_GROW_DOWNWARD
3389 stack_area = gen_rtx_MEM (save_mode,
3390 memory_address (save_mode,
3391 plus_constant (argblock,
3392 - high_to_save)));
3393 #else
3394 stack_area = gen_rtx_MEM (save_mode,
3395 memory_address (save_mode,
3396 plus_constant (argblock,
3397 low_to_save)));
3398 #endif
3399 if (save_mode == BLKmode)
3401 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3402 emit_block_move (validize_mem (save_area), stack_area,
3403 GEN_INT (num_to_save),
3404 PARM_BOUNDARY / BITS_PER_UNIT);
3406 else
3408 save_area = gen_reg_rtx (save_mode);
3409 emit_move_insn (save_area, stack_area);
3412 #endif
3414 /* Push the args that need to be pushed. */
3416 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3417 are to be pushed. */
3418 for (count = 0; count < nargs; count++, argnum += inc)
3420 register enum machine_mode mode = argvec[argnum].mode;
3421 register rtx val = argvec[argnum].value;
3422 rtx reg = argvec[argnum].reg;
3423 int partial = argvec[argnum].partial;
3424 #ifdef ACCUMULATE_OUTGOING_ARGS
3425 int lower_bound, upper_bound, i;
3426 #endif
3428 if (! (reg != 0 && partial == 0))
3430 #ifdef ACCUMULATE_OUTGOING_ARGS
3431 /* If this is being stored into a pre-allocated, fixed-size, stack
3432 area, save any previous data at that location. */
3434 #ifdef ARGS_GROW_DOWNWARD
3435 /* stack_slot is negative, but we want to index stack_usage_map
3436 with positive values. */
3437 upper_bound = -argvec[argnum].offset.constant + 1;
3438 lower_bound = upper_bound - argvec[argnum].size.constant;
3439 #else
3440 lower_bound = argvec[argnum].offset.constant;
3441 upper_bound = lower_bound + argvec[argnum].size.constant;
3442 #endif
3444 for (i = lower_bound; i < upper_bound; i++)
3445 if (stack_usage_map[i]
3446 /* Don't store things in the fixed argument area at this point;
3447 it has already been saved. */
3448 && i > reg_parm_stack_space)
3449 break;
3451 if (i != upper_bound)
3453 /* We need to make a save area. See what mode we can make it. */
3454 enum machine_mode save_mode
3455 = mode_for_size (argvec[argnum].size.constant * BITS_PER_UNIT,
3456 MODE_INT, 1);
3457 rtx stack_area
3458 = gen_rtx_MEM (save_mode,
3459 memory_address (save_mode,
3460 plus_constant (argblock,
3461 argvec[argnum].offset.constant)));
3462 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3463 emit_move_insn (argvec[argnum].save_area, stack_area);
3465 #endif
3466 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
3467 argblock, GEN_INT (argvec[argnum].offset.constant),
3468 reg_parm_stack_space);
3470 #ifdef ACCUMULATE_OUTGOING_ARGS
3471 /* Now mark the segment we just used. */
3472 for (i = lower_bound; i < upper_bound; i++)
3473 stack_usage_map[i] = 1;
3474 #endif
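/* Marking these bytes ensures that a nested libcall expanded while this one
   is still being set up will see that the slots are live and save them
   before reusing the area.  */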
3476 NO_DEFER_POP;
3480 #ifndef PUSH_ARGS_REVERSED
3481 #ifdef PREFERRED_STACK_BOUNDARY
3482 /* If we pushed args in forward order, perform stack alignment
3483 after pushing the last arg. */
3484 if (argblock == 0)
3485 anti_adjust_stack (GEN_INT (args_size.constant
3486 - original_args_size.constant));
3487 #endif
3488 #endif
3490 #ifdef PUSH_ARGS_REVERSED
3491 argnum = nargs - 1;
3492 #else
3493 argnum = 0;
3494 #endif
3496 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0);
3498 /* Now load any reg parms into their regs. */
3500 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3501 are to be pushed. */
3502 for (count = 0; count < nargs; count++, argnum += inc)
3504 register rtx val = argvec[argnum].value;
3505 rtx reg = argvec[argnum].reg;
3506 int partial = argvec[argnum].partial;
3508 if (reg != 0 && partial == 0)
3509 emit_move_insn (reg, val);
3510 NO_DEFER_POP;
3513 #if 0
3514 /* For version 1.37, try deleting this entirely. */
3515 if (! no_queue)
3516 emit_queue ();
3517 #endif
3519 /* Any regs containing parms remain in use through the call. */
3520 for (count = 0; count < nargs; count++)
3521 if (argvec[count].reg != 0)
3522 use_reg (&call_fusage, argvec[count].reg);
3524 /* Pass the function the address in which to return a structure value. */
3525 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
3527 emit_move_insn (struct_value_rtx,
3528 force_reg (Pmode,
3529 force_operand (XEXP (mem_value, 0),
3530 NULL_RTX)));
3531 if (GET_CODE (struct_value_rtx) == REG)
3532 use_reg (&call_fusage, struct_value_rtx);
3535 /* Don't allow popping to be deferred, since then
3536 cse'ing of library calls could delete a call and leave the pop. */
3537 NO_DEFER_POP;
3539 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3540 will set inhibit_defer_pop to that value. */
3541 /* See the comment in emit_library_call about the function type we build
3542 and pass here. */
3544 emit_call_1 (fun,
3545 get_identifier (XSTR (orgfun, 0)),
3546 build_function_type (type_for_mode (outmode, 0), NULL_TREE),
3547 original_args_size.constant, args_size.constant,
3548 struct_value_size,
3549 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3550 mem_value == 0 ? hard_libcall_value (outmode) : NULL_RTX,
3551 old_inhibit_defer_pop + 1, call_fusage, is_const);
3553 /* Now restore inhibit_defer_pop to its actual original value. */
3554 OK_DEFER_POP;
3556 pop_temp_slots ();
3558 /* Copy the value to the right place. */
3559 if (outmode != VOIDmode)
3561 if (mem_value)
3563 if (value == 0)
3564 value = mem_value;
3565 if (value != mem_value)
3566 emit_move_insn (value, mem_value);
3568 else if (value != 0)
3569 emit_move_insn (value, hard_libcall_value (outmode));
3570 else
3571 value = hard_libcall_value (outmode);
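/* To summarize the cases above: a memory return slot is either returned
   directly (when no VALUE was supplied) or copied into VALUE; a register
   return is likewise copied into VALUE or handed back in the hard register
   chosen by hard_libcall_value.  */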
3574 #ifdef ACCUMULATE_OUTGOING_ARGS
3575 #ifdef REG_PARM_STACK_SPACE
3576 if (save_area)
3578 enum machine_mode save_mode = GET_MODE (save_area);
3579 #ifdef ARGS_GROW_DOWNWARD
3580 rtx stack_area
3581 = gen_rtx_MEM (save_mode,
3582 memory_address (save_mode,
3583 plus_constant (argblock,
3584 - high_to_save)));
3585 #else
3586 rtx stack_area
3587 = gen_rtx_MEM (save_mode,
3588 memory_address (save_mode,
3589 plus_constant (argblock, low_to_save)));
3590 #endif
3591 if (save_mode != BLKmode)
3592 emit_move_insn (stack_area, save_area);
3593 else
3594 emit_block_move (stack_area, validize_mem (save_area),
3595 GEN_INT (high_to_save - low_to_save + 1),
3596 PARM_BOUNDARY / BITS_PER_UNIT);
3598 #endif
3600 /* If we saved any argument areas, restore them. */
3601 for (count = 0; count < nargs; count++)
3602 if (argvec[count].save_area)
3604 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3605 rtx stack_area
3606 = gen_rtx_MEM (save_mode,
3607 memory_address (save_mode, plus_constant (argblock,
3608 argvec[count].offset.constant)));
3610 emit_move_insn (stack_area, argvec[count].save_area);
3613 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3614 stack_usage_map = initial_stack_usage_map;
3615 #endif
3617 return value;
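/* A hedged usage sketch (not taken from this file; the argument choices are
   illustrative): callers elsewhere in the compiler invoke this roughly as

     result = emit_library_call_value (memcmp_libfunc, NULL_RTX, 0, SImode, 3,
                                       addr1, Pmode, addr2, Pmode,
                                       len, SImode);

   i.e. NARGS value/mode pairs follow the fixed arguments, exactly as the
   varargs scan earlier in this function consumes them.  */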
3620 #if 0
3621 /* Return an rtx which represents a suitable home on the stack
3622 given TYPE, the type of the argument looking for a home.
3623 This is called only for BLKmode arguments.
3625 SIZE is the size needed for this target.
3626 ARGS_ADDR is the address of the bottom of the argument block for this call.
3627 OFFSET describes this parameter's offset into ARGS_ADDR. It is meaningless
3628 if this machine uses push insns. */
3630 static rtx
3631 target_for_arg (type, size, args_addr, offset)
3632 tree type;
3633 rtx size;
3634 rtx args_addr;
3635 struct args_size offset;
3637 rtx target;
3638 rtx offset_rtx = ARGS_SIZE_RTX (offset);
3640 /* We avoid calling memory_address when we can,
3641 because we want to address as close to the stack
3642 as possible. For non-variable-sized arguments,
3643 this will be stack-pointer-relative addressing. */
3644 if (GET_CODE (offset_rtx) == CONST_INT)
3645 target = plus_constant (args_addr, INTVAL (offset_rtx));
3646 else
3648 /* I have no idea how to guarantee that this
3649 will work in the presence of register parameters. */
3650 target = gen_rtx_PLUS (Pmode, args_addr, offset_rtx);
3651 target = memory_address (QImode, target);
3654 return gen_rtx_MEM (BLKmode, target);
3656 #endif
3658 /* Store a single argument for a function call
3659 into the register or memory area where it must be passed.
3660 *ARG describes the argument value and where to pass it.
3662 ARGBLOCK is the address of the stack-block for all the arguments,
3663 or 0 on a machine where arguments are pushed individually.
3665 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3666 so must be careful about how the stack is used.
3668 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3669 argument block. When ACCUMULATE_OUTGOING_ARGS is defined, this indicates
3670 that we need not worry about saving and restoring the stack.
3672 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved for arguments which are also passed in registers. */
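/* In outline, the code below: (1) with ACCUMULATE_OUTGOING_ARGS, saves any
   live data in the stack slots this argument will occupy and marks those
   slots in stack_usage_map; (2) expands the argument value if it was not
   precomputed, converting to the promoted mode if necessary; (3) pushes or
   copies it into place with emit_push_insn (separately for scalar and
   BLKmode values); and (4) releases any temporaries made along the way.  */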
3674 static void
3675 store_one_arg (arg, argblock, may_be_alloca, variable_size,
3676 reg_parm_stack_space)
3677 struct arg_data *arg;
3678 rtx argblock;
3679 int may_be_alloca;
3680 int variable_size ATTRIBUTE_UNUSED;
3681 int reg_parm_stack_space;
3683 register tree pval = arg->tree_value;
3684 rtx reg = 0;
3685 int partial = 0;
3686 int used = 0;
3687 #ifdef ACCUMULATE_OUTGOING_ARGS
3688 int i, lower_bound, upper_bound;
3689 #endif
3691 if (TREE_CODE (pval) == ERROR_MARK)
3692 return;
3694 /* Push a new temporary level for any temporaries we make for
3695 this argument. */
3696 push_temp_slots ();
3698 #ifdef ACCUMULATE_OUTGOING_ARGS
3699 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3700 save any previous data at that location. */
3701 if (argblock && ! variable_size && arg->stack)
3703 #ifdef ARGS_GROW_DOWNWARD
3704 /* stack_slot is negative, but we want to index stack_usage_map
3705 with positive values. */
3706 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3707 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3708 else
3709 upper_bound = 0;
3711 lower_bound = upper_bound - arg->size.constant;
3712 #else
3713 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3714 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3715 else
3716 lower_bound = 0;
3718 upper_bound = lower_bound + arg->size.constant;
3719 #endif
3721 for (i = lower_bound; i < upper_bound; i++)
3722 if (stack_usage_map[i]
3723 /* Don't store things in the fixed argument area at this point;
3724 it has already been saved. */
3725 && i > reg_parm_stack_space)
3726 break;
3728 if (i != upper_bound)
3730 /* We need to make a save area. See what mode we can make it. */
3731 enum machine_mode save_mode
3732 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
3733 rtx stack_area
3734 = gen_rtx_MEM (save_mode,
3735 memory_address (save_mode,
3736 XEXP (arg->stack_slot, 0)));
3738 if (save_mode == BLKmode)
3740 arg->save_area = assign_stack_temp (BLKmode,
3741 arg->size.constant, 0);
3742 MEM_SET_IN_STRUCT_P (arg->save_area,
3743 AGGREGATE_TYPE_P (TREE_TYPE
3744 (arg->tree_value)));
3745 preserve_temp_slots (arg->save_area);
3746 emit_block_move (validize_mem (arg->save_area), stack_area,
3747 GEN_INT (arg->size.constant),
3748 PARM_BOUNDARY / BITS_PER_UNIT);
3750 else
3752 arg->save_area = gen_reg_rtx (save_mode);
3753 emit_move_insn (arg->save_area, stack_area);
3758 /* Now that we have saved any slots that will be overwritten by this
3759 store, mark all slots this store will use. We must do this before
3760 we actually expand the argument since the expansion itself may
3761 trigger library calls which might need to use the same stack slot. */
3762 if (argblock && ! variable_size && arg->stack)
3763 for (i = lower_bound; i < upper_bound; i++)
3764 stack_usage_map[i] = 1;
3765 #endif
3767 /* If this isn't going to be placed on both the stack and in registers,
3768 set up the register and number of words. */
3769 if (! arg->pass_on_stack)
3770 reg = arg->reg, partial = arg->partial;
3772 if (reg != 0 && partial == 0)
3773 /* Being passed entirely in a register. We shouldn't be called in
3774 this case. */
3775 abort ();
3777 /* If this arg needs special alignment, don't load the registers
3778 here. */
3779 if (arg->n_aligned_regs != 0)
3780 reg = 0;
3782 /* If this is being passed partially in a register, we can't evaluate
3783 it directly into its stack slot. Otherwise, we can. */
3784 if (arg->value == 0)
3786 #ifdef ACCUMULATE_OUTGOING_ARGS
3787 /* stack_arg_under_construction is nonzero if a function argument is
3788 being evaluated directly into the outgoing argument list and
3789 expand_call must take special action to preserve the argument list
3790 if it is called recursively.
3792 For scalar function arguments stack_usage_map is sufficient to
3793 determine which stack slots must be saved and restored. Scalar
3794 arguments in general have pass_on_stack == 0.
3796 If this argument is initialized by a function which takes the
3797 address of the argument (a C++ constructor or a C function
3798 returning a BLKmode structure), then stack_usage_map is
3799 insufficient and expand_call must push the stack around the
3800 function call. Such arguments have pass_on_stack == 1.
3802 Note that it is always safe to set stack_arg_under_construction,
3803 but this generates suboptimal code if set when not needed. */
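/* Concretely (illustrative case): an argument built by a function returning
   a BLKmode structure has pass_on_stack set, so the counter is nonzero
   across the expand_expr call below; any expand_call reached in that window
   knows it must protect the whole outgoing argument block instead of
   trusting stack_usage_map.  */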
3805 if (arg->pass_on_stack)
3806 stack_arg_under_construction++;
3807 #endif
3808 arg->value = expand_expr (pval,
3809 (partial
3810 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
3811 ? NULL_RTX : arg->stack,
3812 VOIDmode, 0);
3814 /* If we are promoting the object (or if for any other reason the mode
3815 doesn't agree), convert the mode. */
3817 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
3818 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
3819 arg->value, arg->unsignedp);
3821 #ifdef ACCUMULATE_OUTGOING_ARGS
3822 if (arg->pass_on_stack)
3823 stack_arg_under_construction--;
3824 #endif
3827 /* Don't allow anything left on stack from computation
3828 of argument to alloca. */
3829 if (may_be_alloca)
3830 do_pending_stack_adjust ();
3832 if (arg->value == arg->stack)
3834 /* If the value is already in the stack slot, we are done moving
3835 data. */
3836 if (current_function_check_memory_usage && GET_CODE (arg->stack) == MEM)
3838 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3839 XEXP (arg->stack, 0), Pmode,
3840 ARGS_SIZE_RTX (arg->size),
3841 TYPE_MODE (sizetype),
3842 GEN_INT (MEMORY_USE_RW),
3843 TYPE_MODE (integer_type_node));
3846 else if (arg->mode != BLKmode)
3848 register int size;
3850 /* Argument is a scalar, not entirely passed in registers.
3851 (If part is passed in registers, arg->partial says how much
3852 and emit_push_insn will take care of putting it there.)
3854 Push it, and if its size is less than the
3855 amount of space allocated to it,
3856 also bump stack pointer by the additional space.
3857 Note that in C the default argument promotions
3858 will prevent such mismatches. */
3860 size = GET_MODE_SIZE (arg->mode);
3861 /* Compute how much space the push instruction will push.
3862 On many machines, pushing a byte will advance the stack
3863 pointer by a halfword. */
3864 #ifdef PUSH_ROUNDING
3865 size = PUSH_ROUNDING (size);
3866 #endif
3867 used = size;
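/* For instance, on a target whose push instruction always moves the stack
   pointer in 2-byte units (a hypothetical PUSH_ROUNDING), a 1-byte QImode
   argument records USED == 2 even though GET_MODE_SIZE is 1.  */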
3869 /* Compute how much space the argument should get:
3870 round up to a multiple of the alignment for arguments. */
3871 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
3872 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
3873 / (PARM_BOUNDARY / BITS_PER_UNIT))
3874 * (PARM_BOUNDARY / BITS_PER_UNIT));
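/* Example with hypothetical figures: PARM_BOUNDARY == 32 and a 2-byte
   argument that FUNCTION_ARG_PADDING says must be padded give
   USED == ((2 + 3) / 4) * 4 == 4; the difference USED - SIZE == 2 is passed
   to emit_push_insn below as extra space to allocate beyond the value.  */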
3876 /* This isn't already where we want it on the stack, so put it there.
3877 This can either be done with push or copy insns. */
3878 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX, 0,
3879 partial, reg, used - size, argblock,
3880 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space);
3882 else
3884 /* BLKmode, at least partly to be pushed. */
3886 register int excess;
3887 rtx size_rtx;
3889 /* Pushing a nonscalar.
3890 If part is passed in registers, PARTIAL says how much
3891 and emit_push_insn will take care of putting it there. */
3893 /* Round its size up to a multiple
3894 of the allocation unit for arguments. */
3896 if (arg->size.var != 0)
3898 excess = 0;
3899 size_rtx = ARGS_SIZE_RTX (arg->size);
3901 else
3903 /* PUSH_ROUNDING has no effect on us, because
3904 emit_push_insn for BLKmode is careful to avoid it. */
3905 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
3906 + partial * UNITS_PER_WORD);
3907 size_rtx = expr_size (pval);
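/* With made-up numbers: a 10-byte structure whose stack size was rounded up
   to 12 and which is not partially in registers gives EXCESS == 2; the
   emit_push_insn call below accounts for those padding bytes itself.  */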
3910 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
3911 TYPE_ALIGN (TREE_TYPE (pval)) / BITS_PER_UNIT, partial,
3912 reg, excess, argblock, ARGS_SIZE_RTX (arg->offset),
3913 reg_parm_stack_space);
3917 /* Unless this is a partially-in-register argument, the argument is now
3918 in the stack.
3920 ??? Note that this can change arg->value from arg->stack to
3921 arg->stack_slot and it matters when they are not the same.
3922 It isn't totally clear that this is correct in all cases. */
3923 if (partial == 0)
3924 arg->value = arg->stack_slot;
3926 /* Once we have pushed something, pops can't safely
3927 be deferred during the rest of the arguments. */
3928 NO_DEFER_POP;
3930 /* ANSI doesn't require a sequence point here,
3931 but PCC has one, so this will avoid some problems. */
3932 emit_queue ();
3934 /* Free any temporary slots made in processing this argument. Show
3935 that we might have taken the address of something and pushed that
3936 as an operand. */
3937 preserve_temp_slots (NULL_RTX);
3938 free_temp_slots ();
3939 pop_temp_slots ();