/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "libfuncs.h"
31 #include "function.h"
32 #include "regs.h"
33 #include "toplev.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "timevar.h"
37 #include "sbitmap.h"
38 #include "langhooks.h"
39 #include "target.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED PUSH_ARGS
#endif
#endif

#endif

#ifndef PUSH_ARGS_REVERSED
#define PUSH_ARGS_REVERSED 0
#endif
#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET 0
#endif

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
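/* For instance, a PREFERRED_STACK_BOUNDARY of 64 bits gives
   STACK_BYTES == 8 when BITS_PER_UNIT is the usual 8.  */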
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it gets;
     parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
static int calls_function	PARAMS ((tree, int));
static int calls_function_1	PARAMS ((tree, int));

static void emit_call_1		PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
					 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
					 rtx, int, rtx, int,
					 CUMULATIVE_ARGS *));
static void precompute_register_parameters	PARAMS ((int,
							 struct arg_data *,
							 int *));
static int store_one_arg	PARAMS ((struct arg_data *, rtx, int, int,
					 int));
static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
							    int));
static int finalize_must_preallocate		PARAMS ((int, int,
							 struct arg_data *,
							 struct args_size *));
static void precompute_arguments		PARAMS ((int, int,
							 struct arg_data *));
static int compute_argument_block_size		PARAMS ((int,
							 struct args_size *,
							 int));
static void initialize_argument_information	PARAMS ((int,
							 struct arg_data *,
							 struct args_size *,
							 int, tree, tree,
							 CUMULATIVE_ARGS *,
							 int, rtx *, int *,
							 int *, int *));
static void compute_argument_addresses		PARAMS ((struct arg_data *,
							 rtx, int));
static rtx rtx_for_function_call		PARAMS ((tree, tree));
static void load_register_parameters		PARAMS ((struct arg_data *,
							 int, rtx *, int));
static rtx emit_library_call_value_1		PARAMS ((int, rtx, rtx,
							 enum libcall_type,
							 enum machine_mode,
							 int, va_list));
static int special_function_p			PARAMS ((tree, int));
static rtx try_to_integrate			PARAMS ((tree, tree, rtx,
							 int, tree, rtx));
static int check_sibcall_argument_overlap_1	PARAMS ((rtx));
static int check_sibcall_argument_overlap	PARAMS ((rtx, struct arg_data *));

static int combine_pending_stack_adjustment_and_call
						PARAMS ((int, struct args_size *, int));

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area	PARAMS ((int, rtx, int *, int *));
static void restore_fixed_argument_area	PARAMS ((rtx, rtx, int, int));
#endif
/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need to return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */
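/* So, for instance, calls_function (exp, 0) reports whether EXP contains
   any call at all, while calls_function (exp, 1) reports only the calls
   that matter for stack allocation: alloca-like functions and functions
   that return with the stack pointer depressed (see calls_function_1
   below).  */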
static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;

  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}
/* Recursive function to do the work of above function.  */

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== FUNCTION_TYPE)
	       && (TYPE_RETURNS_STACK_DEPRESSED
		   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL)
	       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				       0)
		   & ECF_MAY_BE_ALLOCA))
	return 1;

      break;

    case CONSTRUCTOR:
      {
	tree tem;

	for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
	  if (calls_function_1 (TREE_VALUE (tem), which))
	    return 1;
      }

      return 0;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	tree local;
	tree subblock;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
	if (calls_function_1 (TREE_VALUE (exp), which))
	  return 1;
      return 0;

    default:
      break;
    }

  /* Only expressions, references, and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
    return 0;

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
     int sibcallp;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
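/* For example, on the i386 a function using the `stdcall' convention pops
   its own arguments on return; RETURN_POPS_ARGS then yields the full
   argument size, N_POPPED below becomes nonzero, and the explicit popping
   code after the call is skipped.  */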
static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
	     struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
	     call_fusage, ecf_flags, args_so_far)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
     HOST_WIDE_INT rounded_stack_size;
     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
     rtx next_arg_reg ATTRIBUTE_UNUSED;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int ecf_flags;
     CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg,
				     gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     n_pop);
      else
	pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			       rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg,
					   gen_rtx_MEM (FUNCTION_MODE, funexp),
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					rounded_stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();
  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();
  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
	(VOIDmode,
	 gen_rtx_USE (VOIDmode,
		      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
	 call_fusage);

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
					       REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
						 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }
  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }
  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_SP_DEPRESSED)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On the i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set LONGJMP if the function is in the longjmp family.

   Set MALLOC for any of the standard memory allocation functions which
   allocate from the heap.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */
static int
special_function_p (fndecl, flags)
     tree fndecl;
     int flags;
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;
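      /* Consequently both `alloca (n)' and `__builtin_alloca (n)' are
	 flagged here, but a call to alloca through a function pointer
	 never is; see the assumption stated above.  */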
      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_LONGJMP;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
		&& ! strcmp (tname, "fork"))
	       /* Linux specific: __clone.  Check NAME to insist on the
		  leading underscores, to avoid polluting the ISO / POSIX
		  namespace.  */
	       || (name[0] == '_' && name[1] == '_'
		   && ! strcmp (tname, "clone"))
	       || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
		   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
		   && (tname[5] == '\0'
		       || ((tname[5] == 'p' || tname[5] == 'e')
			   && tname[6] == '\0'))))
	flags |= ECF_FORK_OR_EXEC;
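      /* The exec pattern above matches exactly execl, execv, execlp,
	 execle, execvp and execve, with or without the _/__ prefixes
	 already stripped into TNAME.  */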
      /* Do not add any more malloc-like functions to this list,
	 instead mark them as malloc functions using the malloc attribute.
	 Note, realloc is not suitable for attribute malloc since
	 it may return the same address across multiple calls.
	 C++ operator new is not suitable because it is not required
	 to return a unique pointer; indeed, the standard placement new
	 just returns its argument.  */
      else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
	       && (! strcmp (tname, "malloc")
		   || ! strcmp (tname, "calloc")
		   || ! strcmp (tname, "strdup")))
	flags |= ECF_MALLOC;
    }

  return flags;
}
/* Return nonzero when FNDECL represents a function in the setjmp family,
   i.e. one that may return more than once.  */

int
setjmp_call_p (fndecl)
     tree fndecl;
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
			      0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (exp)
     tree exp;
{
  int flags = 0;
  tree type = exp;
  /* ??? We can't set IS_MALLOC for function types?  */
  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_P (exp) && DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_P (exp) && DECL_IS_PURE (exp))
	flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;
    }

  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST | ECF_LIBCALL_BLOCK;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (num_actuals, args, reg_parm_seen)
     int num_actuals;
     struct arg_data *args;
     int *reg_parm_seen;
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
					 VOIDmode, 0);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();

	    /* ANSI doesn't require a sequence point here,
	       but PCC has one, so this will avoid some problems.  */
	    emit_queue ();
	  }

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !LEGITIMATE_CONSTANT_P (args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	if ((! (GET_CODE (args[i].value) == REG
		|| (GET_CODE (args[i].value) == SUBREG
		    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
	    && args[i].mode != BLKmode
	    && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
	    && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
		|| preserve_subexpressions_p ()))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
			  low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int i;
  rtx save_area = NULL_RTX;

  /* Compute the boundary of the area that needs to be saved, if any.  */
#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
	  || stack_usage_map[i] == 0)
	continue;

      if (*low_to_save == -1)
	*low_to_save = i;

      *high_to_save = i;
    }

  if (*low_to_save >= 0)
    {
      int num_to_save = *high_to_save - *low_to_save + 1;
      enum machine_mode save_mode
	= mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
				BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
      stack_area
	= gen_rtx_MEM (save_mode,
		       memory_address (save_mode,
				       plus_constant (argblock,
						      - *high_to_save)));
#else
      stack_area = gen_rtx_MEM (save_mode,
				memory_address (save_mode,
						plus_constant (argblock,
							       *low_to_save)));
#endif

      set_mem_align (stack_area, PARM_BOUNDARY);
      if (save_mode == BLKmode)
	{
	  save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	  emit_block_move (validize_mem (save_area), stack_area,
			   GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	}
      else
	{
	  save_area = gen_reg_rtx (save_mode);
	  emit_move_insn (save_area, stack_area);
	}
    }

  return save_area;
}
static void
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
     rtx save_area;
     rtx argblock;
     int high_to_save;
     int low_to_save;
{
  enum machine_mode save_mode = GET_MODE (save_area);
#ifdef ARGS_GROW_DOWNWARD
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  - high_to_save)));
#else
  rtx stack_area
    = gen_rtx_MEM (save_mode,
		   memory_address (save_mode,
				   plus_constant (argblock,
						  low_to_save)));
#endif

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */
static void
store_unaligned_arguments_into_pseudos (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int big_endian_correction = 0;

	args[i].n_aligned_regs
	  = args[i].partial ? args[i].partial
	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
						* args[i].n_aligned_regs);

	/* Structures smaller than a word are aligned to the least
	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (BYTES_BIG_ENDIAN
	    && bytes < UNITS_PER_WORD)
	  big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
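	/* For instance, a 3-byte structure on a 32-bit big-endian target
	   gets big_endian_correction = 32 - 24 = 8, so the store below
	   places its 24 value bits at bit offset 8, skipping the empty
	   high-order byte.  */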
	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
			     extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
						word_mode, word_mode,
						BITS_PER_WORD),
			     BITS_PER_WORD);
	  }
      }
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree node for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */
static void
initialize_argument_information (num_actuals, args, args_size, n_named_args,
				 actparms, fndecl, args_so_far,
				 reg_parm_stack_space, old_stack_level,
				 old_pending_adj, must_preallocate,
				 ecf_flags)
     int num_actuals ATTRIBUTE_UNUSED;
     struct arg_data *args;
     struct args_size *args_size;
     int n_named_args ATTRIBUTE_UNUSED;
     tree actparms;
     tree fndecl;
     CUMULATIVE_ARGS *args_so_far;
     int reg_parm_stack_space;
     rtx *old_stack_level;
     int *old_pending_adj;
     int *must_preallocate;
     int *ecf_flags;
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  struct args_size alignment_pad;
  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
		 new object from the argument.  If we are passing by
		 invisible reference, the callee will do that for us, so we
		 can strip off the TARGET_EXPR.  This is not always safe,
		 but it is safe in the only case where this is a useful
		 optimization; namely, when the argument is a plain object.
		 In that case, the frontend is just asking the backend to
		 make a bitwise copy of the argument.  */

	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
	    {
	      /* In the V3 C++ ABI, parameters are destroyed in the caller.
		 We implement this by passing the address of the temporary
		 rather than expanding it into another allocated slot.  */
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
						STACK_CHECK_MAX_VAR_SIZE))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space
				      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0);
	      *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
						     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, args_size, &args[i].offset,
			     &args[i].size, &alignment_pad);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;
#endif

      args[i].alignment_pad = alignment_pad;

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));
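      /* E.g., with partial == 2, UNITS_PER_WORD == 4 and PARM_BOUNDARY ==
	 32 bits, the 8 bytes passed in registers are rounded down to a
	 multiple of the 4-byte boundary (still 8) and subtracted from the
	 stack size of the argument.  */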
      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (*args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;

      args[i].slot_offset.constant = -args_size->constant;
      if (args_size->var)
	SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (reg_parm_stack_space, args_size,
			     preferred_stack_boundary)
     int reg_parm_stack_space;
     struct args_size *args_size;
     int preferred_stack_boundary ATTRIBUTE_UNUSED;
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  if (stack_pointer_delta & (preferred_stack_boundary - 1))
	    abort ();
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size->var
	    = size_binop (MINUS_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);
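      /* Worked example: with args_size->constant == 10, a
	 stack_pointer_delta of 4 and an 8-byte boundary, the expression
	 rounds 10 + 4 up to 16 and subtracts the delta again, giving 12;
	 the total adjustment 12 + 4 is then 8-byte aligned.  */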
      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (flags, num_actuals, args)
     int flags;
     int num_actuals;
     struct arg_data *args;
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */

  for (i = 0; i < num_actuals; i++)
    if ((flags & ECF_LIBCALL_BLOCK)
	|| calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
	enum machine_mode mode;

	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].value, 0);

	mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
	if (mode != args[i].mode)
	  {
	    args[i].value
	      = convert_modes (args[i].mode, mode,
			       args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
	    /* CSE will replace this only if it contains args[i].value
	       pseudo, so convert it down to the declared mode using
	       a SUBREG.  */
	    if (GET_CODE (args[i].value) == REG
		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
	      {
		args[i].initial_value
		  = gen_lowpart_SUBREG (mode, args[i].value);
		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
		SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					      args[i].unsignedp);
	      }
#endif
	  }
      }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
     int must_preallocate;
     int num_actuals;
     struct arg_data *args;
     struct args_size *args_size;
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (args, argblock, num_actuals)
     struct arg_data *args;
     rtx argblock;
     int num_actuals;
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_attributes (args[i].stack,
			      TREE_TYPE (args[i].tree_value), 1);

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_attributes (args[i].stack_slot,
			      TREE_TYPE (args[i].tree_value), 1);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   EXP is the CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (fndecl, exp)
     tree fndecl;
     tree exp;
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (! TREE_USED (fndecl))
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
      emit_queue ();
    }
  return funexp;
}
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.  */

static void
load_register_parameters (args, num_actuals, call_fusage, flags)
     struct arg_data *args;
     int num_actuals;
     rtx *call_fusage;
     int flags;
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      int partial = args[i].partial;
      int nregs;

      if (reg)
	{
	  /* Set to non-negative if we must move a word at a time, even if
	     just one word (e.g., partial == 1 && mode == DFmode).  Set to
	     -1 if we just use a normal move insn.  This value can be zero
	     if the argument is a zero size structure with no fields.  */
	  nregs = (partial ? partial
		   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
		      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
			  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		      : -1));
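	  /* For example, a 10-byte BLKmode argument with 4-byte words
	     gives nregs = (10 + 3) / 4 = 3 registers to fill.  */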
1735 /* Handle calls that pass values in multiple non-contiguous
1736 locations. The Irix 6 ABI has examples of this. */
1738 if (GET_CODE (reg) == PARALLEL)
1739 emit_group_load (reg, args[i].value,
1740 int_size_in_bytes (TREE_TYPE (args[i].tree_value)));
1742 /* If simple case, just do move. If normal partial, store_one_arg
1743 has already loaded the register for us. In all other cases,
1744 load the register(s) from memory. */
1746 else if (nregs == -1)
1747 emit_move_insn (reg, args[i].value);
1749 /* If we have pre-computed the values to put in the registers in
1750 the case of non-aligned structures, copy them in now. */
1752 else if (args[i].n_aligned_regs != 0)
1753 for (j = 0; j < args[i].n_aligned_regs; j++)
1754 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1755 args[i].aligned_regs[j]);
1757 else if (partial == 0 || args[i].pass_on_stack)
1758 move_block_to_reg (REGNO (reg),
1759 validize_mem (args[i].value), nregs,
1760 args[i].mode);
1762 /* Handle calls that pass values in multiple non-contiguous
1763 locations. The Irix 6 ABI has examples of this. */
1764 if (GET_CODE (reg) == PARALLEL)
1765 use_group_regs (call_fusage, reg);
1766 else if (nregs == -1)
1767 use_reg (call_fusage, reg);
1768 else
1769 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
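/* For instance, a 10-byte BLKmode argument passed wholly in registers
   on a hypothetical 32-bit target gives nregs == (10 + 3) / 4 == 3, so
   move_block_to_reg above fills three consecutive hard registers
   starting at REGNO (reg).  */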
1774 /* Try to integrate function. See expand_inline_function for documentation
1775 about the parameters. */
1777 static rtx
1778 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1779 tree fndecl;
1780 tree actparms;
1781 rtx target;
1782 int ignore;
1783 tree type;
1784 rtx structure_value_addr;
1786 rtx temp;
1787 rtx before_call;
1788 int i;
1789 rtx old_stack_level = 0;
1790 int reg_parm_stack_space = 0;
1792 #ifdef REG_PARM_STACK_SPACE
1793 #ifdef MAYBE_REG_PARM_STACK_SPACE
1794 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1795 #else
1796 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1797 #endif
1798 #endif
1800 before_call = get_last_insn ();
1802 timevar_push (TV_INTEGRATION);
1804 temp = expand_inline_function (fndecl, actparms, target,
1805 ignore, type,
1806 structure_value_addr);
1808 timevar_pop (TV_INTEGRATION);
1810 /* If inlining succeeded, return. */
1811 if (temp != (rtx) (size_t) - 1)
1813 if (ACCUMULATE_OUTGOING_ARGS)
1815 /* If the outgoing argument list must be preserved, push
1816 the stack before executing the inlined function if it
1817 makes any calls. */
1819 for (i = reg_parm_stack_space - 1; i >= 0; i--)
1820 if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1821 break;
1823 if (stack_arg_under_construction || i >= 0)
1825 rtx first_insn
1826 = before_call ? NEXT_INSN (before_call) : get_insns ();
1827 rtx insn = NULL_RTX, seq;
1829 /* Look for a call in the inline function code.
1830 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1831 nonzero then there is a call and it is not necessary
1832 to scan the insns. */
1834 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1835 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1836 if (GET_CODE (insn) == CALL_INSN)
1837 break;
1839 if (insn)
1841 /* Reserve enough stack space so that the largest
1842 argument list of any function call in the inline
1843 function does not overlap the argument list being
1844 evaluated. This is usually an overestimate because
1845 allocate_dynamic_stack_space reserves space for an
1846 outgoing argument list in addition to the requested
1847 space, but there is no way to ask for stack space such
1848 that an argument list of a certain length can be
1849 safely constructed.
1851 Add the stack space reserved for register arguments, if
1852 any, in the inline function. What is really needed is the
1853 largest value of reg_parm_stack_space in the inline
1854 function, but that is not available. Using the current
1855 value of reg_parm_stack_space is wrong, but gives
1856 correct results on all supported machines. */
1858 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1859 + reg_parm_stack_space);
1861 start_sequence ();
1862 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1863 allocate_dynamic_stack_space (GEN_INT (adjust),
1864 NULL_RTX, BITS_PER_UNIT);
1865 seq = get_insns ();
1866 end_sequence ();
1867 emit_insn_before (seq, first_insn);
1868 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1873 /* If the result is equivalent to TARGET, return TARGET to simplify
1874 checks in store_expr. They can be equivalent but not equal in the
1875 case of a function that returns BLKmode. */
1876 if (temp != target && rtx_equal_p (temp, target))
1877 return target;
1878 return temp;
1881 /* If inlining failed, mark FNDECL as needing to be compiled
1882 separately after all. If function was declared inline,
1883 give a warning. */
1884 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1885 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1887 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1888 warning ("called from here");
1890 (*lang_hooks.mark_addressable) (fndecl);
1891 return (rtx) (size_t) - 1;
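/* The (rtx) (size_t) -1 value returned here (and by
   expand_inline_function) is the sentinel that expand_call tests to
   learn that inlining did not happen.  */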
1894 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1895 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1896 bytes, then we would need to push some additional bytes to pad the
1897 arguments. So, we compute an adjustment to the stack pointer for an
1898 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1899 bytes. Then, when the arguments are pushed the stack will be perfectly
1900 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1901 be popped after the call. Returns the adjustment. */
1903 static int
1904 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1905 args_size,
1906 preferred_unit_stack_boundary)
1907 int unadjusted_args_size;
1908 struct args_size *args_size;
1909 int preferred_unit_stack_boundary;
1911 /* The number of bytes to pop so that the stack will be
1912 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1913 HOST_WIDE_INT adjustment;
1914 /* The alignment of the stack after the arguments are pushed, if we
1915 just pushed the arguments without adjusting the stack here. */
1916 HOST_WIDE_INT unadjusted_alignment;
1918 unadjusted_alignment
1919 = ((stack_pointer_delta + unadjusted_args_size)
1920 % preferred_unit_stack_boundary);
1922 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1923 as possible -- leaving just enough left to cancel out the
1924 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1925 PENDING_STACK_ADJUST is non-negative, and congruent to
1926 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1928 /* Begin by trying to pop all the bytes. */
1929 unadjusted_alignment
1930 = (unadjusted_alignment
1931 - (pending_stack_adjust % preferred_unit_stack_boundary));
1932 adjustment = pending_stack_adjust;
1933 /* Push enough additional bytes that the stack will be aligned
1934 after the arguments are pushed. */
1935 if (preferred_unit_stack_boundary > 1)
1937 if (unadjusted_alignment > 0)
1938 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1939 else
1940 adjustment += unadjusted_alignment;
1943 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1944 bytes after the call. The right number is the entire
1945 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1946 by the arguments in the first place. */
1947 args_size->constant
1948 = pending_stack_adjust - adjustment + unadjusted_args_size;
1950 return adjustment;
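/* A worked example with hypothetical numbers: if stack_pointer_delta is
   12, UNADJUSTED_ARGS_SIZE is 8, PENDING_STACK_ADJUST is 16 and the
   boundary is 16 bytes, then UNADJUSTED_ALIGNMENT starts as
   (12 + 8) % 16 == 4 and ADJUSTMENT becomes 16 - (16 - 4) == 4; popping
   4 bytes now lets the 8 argument bytes end exactly on the boundary,
   and ARGS_SIZE->CONSTANT becomes 16 - 4 + 8 == 20 bytes to pop after
   the call.  */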
1953 /* Scan expression X to see whether it dereferences any argument slots
1954 already clobbered by tail call arguments (as noted in the
1955 stored_args_map bitmap).
1956 Return nonzero if X dereferences such an argument slot,
1957 zero otherwise. */
1959 static int
1960 check_sibcall_argument_overlap_1 (x)
1961 rtx x;
1963 RTX_CODE code;
1964 int i, j;
1965 unsigned int k;
1966 const char *fmt;
1968 if (x == NULL_RTX)
1969 return 0;
1971 code = GET_CODE (x);
1973 if (code == MEM)
1975 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1976 i = 0;
1977 else if (GET_CODE (XEXP (x, 0)) == PLUS
1978 && XEXP (XEXP (x, 0), 0) ==
1979 current_function_internal_arg_pointer
1980 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1981 i = INTVAL (XEXP (XEXP (x, 0), 1));
1982 else
1983 return 0;
1985 #ifdef ARGS_GROW_DOWNWARD
1986 i = -i - GET_MODE_SIZE (GET_MODE (x));
1987 #endif
1989 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1990 if (i + k < stored_args_map->n_bits
1991 && TEST_BIT (stored_args_map, i + k))
1992 return 1;
1994 return 0;
1997 /* Scan all subexpressions. */
1998 fmt = GET_RTX_FORMAT (code);
1999 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2001 if (*fmt == 'e')
2003 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2004 return 1;
2006 else if (*fmt == 'E')
2008 for (j = 0; j < XVECLEN (x, i); j++)
2009 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2010 return 1;
2013 return 0;
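/* For example, a 4-byte MEM at (plus internal_arg_pointer (const_int 8))
   tests bits 8 through 11 of stored_args_map (after the sign of the
   offset is flipped when ARGS_GROW_DOWNWARD).  */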
2016 /* Scan the sequence after INSN to see whether it dereferences any
2017 argument slots already clobbered by tail call arguments (as noted
2018 in the stored_args_map bitmap). Afterwards, add the stack slots
2019 for ARG to the stored_args_map bitmap. Return nonzero if the
2020 sequence after INSN dereferences such argument slots, zero otherwise. */
2022 static int
2023 check_sibcall_argument_overlap (insn, arg)
2024 rtx insn;
2025 struct arg_data *arg;
2027 int low, high;
2029 if (insn == NULL_RTX)
2030 insn = get_insns ();
2031 else
2032 insn = NEXT_INSN (insn);
2034 for (; insn; insn = NEXT_INSN (insn))
2035 if (INSN_P (insn)
2036 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2037 break;
2039 #ifdef ARGS_GROW_DOWNWARD
2040 low = -arg->slot_offset.constant - arg->size.constant;
2041 #else
2042 low = arg->slot_offset.constant;
2043 #endif
2045 for (high = low + arg->size.constant; low < high; low++)
2046 SET_BIT (stored_args_map, low);
2047 return insn != NULL_RTX;
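/* Note that the slots for ARG are marked even when an overlap was
   found; the caller records the failure (see sibcall_failure in
   expand_call) and discards the sibcall sequence later.  */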
2050 /* Generate all the code for a function call
2051 and return an rtx for its value.
2052 Store the value in TARGET (specified as an rtx) if convenient.
2053 If the value is stored in TARGET then TARGET is returned.
2054 If IGNORE is nonzero, then we ignore the value of the function call. */
2057 expand_call (exp, target, ignore)
2058 tree exp;
2059 rtx target;
2060 int ignore;
2062 /* Nonzero if we are currently expanding a call. */
2063 static int currently_expanding_call = 0;
2065 /* List of actual parameters. */
2066 tree actparms = TREE_OPERAND (exp, 1);
2067 /* RTX for the function to be called. */
2068 rtx funexp;
2069 /* Sequence of insns to perform a tail recursive "call". */
2070 rtx tail_recursion_insns = NULL_RTX;
2071 /* Sequence of insns to perform a normal "call". */
2072 rtx normal_call_insns = NULL_RTX;
2073 /* Sequence of insns to perform a tail call. */
2074 rtx tail_call_insns = NULL_RTX;
2075 /* Data type of the function. */
2076 tree funtype;
2077 /* Declaration of the function being called,
2078 or 0 if the function is computed (not known by name). */
2079 tree fndecl = 0;
2080 rtx insn;
2081 int try_tail_call = 1;
2082 int try_tail_recursion = 1;
2083 int pass;
2085 /* Register in which non-BLKmode value will be returned,
2086 or 0 if no value or if value is BLKmode. */
2087 rtx valreg;
2088 /* Address where we should return a BLKmode value;
2089 0 if value not BLKmode. */
2090 rtx structure_value_addr = 0;
2091 /* Nonzero if that address is being passed by treating it as
2092 an extra, implicit first parameter. Otherwise,
2093 it is passed by being copied directly into struct_value_rtx. */
2094 int structure_value_addr_parm = 0;
2095 /* Size of aggregate value wanted, or zero if none wanted
2096 or if we are using the non-reentrant PCC calling convention
2097 or expecting the value in registers. */
2098 HOST_WIDE_INT struct_value_size = 0;
2099 /* Nonzero if called function returns an aggregate in memory PCC style,
2100 by returning the address of where to find it. */
2101 int pcc_struct_value = 0;
2103 /* Number of actual parameters in this call, including struct value addr. */
2104 int num_actuals;
2105 /* Number of named args. Args after this are anonymous ones
2106 and they must all go on the stack. */
2107 int n_named_args;
2109 /* Vector of information about each argument.
2110 Arguments are numbered in the order they will be pushed,
2111 not the order they are written. */
2112 struct arg_data *args;
2114 /* Total size in bytes of all the stack-parms scanned so far. */
2115 struct args_size args_size;
2116 struct args_size adjusted_args_size;
2117 /* Size of arguments before any adjustments (such as rounding). */
2118 int unadjusted_args_size;
2119 /* Data on reg parms scanned so far. */
2120 CUMULATIVE_ARGS args_so_far;
2121 /* Nonzero if a reg parm has been scanned. */
2122 int reg_parm_seen;
2125 /* Nonzero if we must avoid push-insns in the args for this call.
2126 If stack space is allocated for register parameters, but not by the
2127 caller, then it is preallocated in the fixed part of the stack frame.
2128 So the entire argument block must then be preallocated (i.e., we
2129 ignore PUSH_ROUNDING in that case). */
2131 int must_preallocate = !PUSH_ARGS;
2133 /* Size of the stack reserved for parameter registers. */
2134 int reg_parm_stack_space = 0;
2136 /* Address of space preallocated for stack parms
2137 (on machines that lack push insns), or 0 if space not preallocated. */
2138 rtx argblock = 0;
2140 /* Mask of ECF_ flags. */
2141 int flags = 0;
2142 /* Nonzero if this is a call to an inline function. */
2143 int is_integrable = 0;
2144 #ifdef REG_PARM_STACK_SPACE
2145 /* Define the boundary of the register parm stack space that needs to be
2146 saved, if any. */
2147 int low_to_save = -1, high_to_save;
2148 rtx save_area = 0; /* Place that it is saved */
2149 #endif
2151 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2152 char *initial_stack_usage_map = stack_usage_map;
2153 int old_stack_arg_under_construction = 0;
2155 rtx old_stack_level = 0;
2156 int old_pending_adj = 0;
2157 int old_inhibit_defer_pop = inhibit_defer_pop;
2158 int old_stack_allocated;
2159 rtx call_fusage;
2160 tree p = TREE_OPERAND (exp, 0);
2161 int i;
2162 /* The alignment of the stack, in bits. */
2163 HOST_WIDE_INT preferred_stack_boundary;
2164 /* The alignment of the stack, in bytes. */
2165 HOST_WIDE_INT preferred_unit_stack_boundary;
2167 /* See if this is a "nothrow" function call. */
2168 if (TREE_NOTHROW (exp))
2169 flags |= ECF_NOTHROW;
2171 /* See if we can find a DECL-node for the actual function.
2172 As a result, decide whether this is a call to an integrable function. */
2174 fndecl = get_callee_fndecl (exp);
2175 if (fndecl)
2177 if (!flag_no_inline
2178 && fndecl != current_function_decl
2179 && DECL_INLINE (fndecl)
2180 && DECL_SAVED_INSNS (fndecl)
2181 && DECL_SAVED_INSNS (fndecl)->inlinable)
2182 is_integrable = 1;
2183 else if (! TREE_ADDRESSABLE (fndecl))
2185 /* In case this function later becomes inlinable,
2186 record that there was already a non-inline call to it.
2188 Use abstraction instead of setting TREE_ADDRESSABLE
2189 directly. */
2190 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2191 && optimize > 0)
2193 warning_with_decl (fndecl, "can't inline call to `%s'");
2194 warning ("called from here");
2196 (*lang_hooks.mark_addressable) (fndecl);
2199 flags |= flags_from_decl_or_type (fndecl);
2202 /* If we don't have a specific function to call, see if any
2203 attributes are set in the function's type. */
2204 else
2205 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2207 #ifdef REG_PARM_STACK_SPACE
2208 #ifdef MAYBE_REG_PARM_STACK_SPACE
2209 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2210 #else
2211 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2212 #endif
2213 #endif
2215 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2216 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2217 must_preallocate = 1;
2218 #endif
2220 /* Warn if this value is an aggregate type,
2221 regardless of which calling convention we are using for it. */
2222 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2223 warning ("function call has aggregate value");
2225 /* Set up a place to return a structure. */
2227 /* Cater to broken compilers. */
2228 if (aggregate_value_p (exp))
2230 /* This call returns a big structure. */
2231 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2233 #ifdef PCC_STATIC_STRUCT_RETURN
2235 pcc_struct_value = 1;
2236 /* Easier than making that case work right. */
2237 if (is_integrable)
2239 /* In case this is a static function, note that it has been
2240 used. */
2241 if (! TREE_ADDRESSABLE (fndecl))
2242 (*lang_hooks.mark_addressable) (fndecl);
2243 is_integrable = 0;
2246 #else /* not PCC_STATIC_STRUCT_RETURN */
2248 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2250 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2252 /* The structure value address arg is already in actparms.
2253 Pull it out. It might be nice to just leave it there, but
2254 we need to set structure_value_addr. */
2255 tree return_arg = TREE_VALUE (actparms);
2256 actparms = TREE_CHAIN (actparms);
2257 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2258 VOIDmode, EXPAND_NORMAL);
2260 else if (target && GET_CODE (target) == MEM)
2261 structure_value_addr = XEXP (target, 0);
2262 else
2264 /* For variable-sized objects, we must be called with a target
2265 specified. If we were to allocate space on the stack here,
2266 we would have no way of knowing when to free it. */
2267 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2269 mark_temp_addr_taken (d);
2270 structure_value_addr = XEXP (d, 0);
2271 target = 0;
2274 #endif /* not PCC_STATIC_STRUCT_RETURN */
2277 /* If called function is inline, try to integrate it. */
2279 if (is_integrable)
2281 rtx temp = try_to_integrate (fndecl, actparms, target,
2282 ignore, TREE_TYPE (exp),
2283 structure_value_addr);
2284 if (temp != (rtx) (size_t) - 1)
2285 return temp;
2288 /* Figure out the amount to which the stack should be aligned. */
2289 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2291 /* Operand 0 is a pointer-to-function; get the type of the function. */
2292 funtype = TREE_TYPE (TREE_OPERAND (exp, 0));
2293 if (! POINTER_TYPE_P (funtype))
2294 abort ();
2295 funtype = TREE_TYPE (funtype);
2297 /* See if this is a call to a function that can return more than once
2298 or a call to longjmp or malloc. */
2299 flags |= special_function_p (fndecl, flags);
2301 if (flags & ECF_MAY_BE_ALLOCA)
2302 current_function_calls_alloca = 1;
2304 /* If struct_value_rtx is 0, it means pass the address
2305 as if it were an extra parameter. */
2306 if (structure_value_addr && struct_value_rtx == 0)
2308 /* If structure_value_addr is a REG other than
2309 virtual_outgoing_args_rtx, we can always use it. If it
2310 is not a REG, we must always copy it into a register.
2311 If it is virtual_outgoing_args_rtx, we must copy it to another
2312 register in some cases. */
2313 rtx temp = (GET_CODE (structure_value_addr) != REG
2314 || (ACCUMULATE_OUTGOING_ARGS
2315 && stack_arg_under_construction
2316 && structure_value_addr == virtual_outgoing_args_rtx)
2317 ? copy_addr_to_reg (structure_value_addr)
2318 : structure_value_addr);
2320 actparms
2321 = tree_cons (error_mark_node,
2322 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2323 temp),
2324 actparms);
2325 structure_value_addr_parm = 1;
2328 /* Count the arguments and set NUM_ACTUALS. */
2329 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2330 num_actuals++;
2332 /* Compute number of named args.
2333 Normally, don't include the last named arg if anonymous args follow.
2334 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2335 (If no anonymous args follow, the result of list_length is actually
2336 one too large. This is harmless.)
2338 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2339 zero, this machine will be able to place unnamed args that were
2340 passed in registers into the stack. So treat all args as named.
2341 This allows the insns emitted for a specific argument list to be
2342 independent of the function declaration.
2344 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2345 reliable way to pass unnamed args in registers, so we must force
2346 them into memory. */
2348 if ((STRICT_ARGUMENT_NAMING
2349 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2350 && TYPE_ARG_TYPES (funtype) != 0)
2351 n_named_args
2352 = (list_length (TYPE_ARG_TYPES (funtype))
2353 /* Don't include the last named arg. */
2354 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2355 /* Count the struct value address, if it is passed as a parm. */
2356 + structure_value_addr_parm);
2357 else
2358 /* If we know nothing, treat all args as named. */
2359 n_named_args = num_actuals;
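/* Concretely, assuming STRICT_ARGUMENT_NAMING is zero and no struct
   value parm: for int f (int, int) the type list is int, int, void, so
   list_length yields 3 and n_named_args is 2; for int f (int, ...) it
   yields 1, and the sole named arg is conservatively treated as
   unnamed.  */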
2361 /* Start updating where the next arg would go.
2363 On some machines (such as the PA) indirect calls have a different
2364 calling convention than normal calls. The last argument in
2365 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2366 or not. */
2367 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2369 /* Make a vector to hold all the information about each arg. */
2370 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2371 memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
2373 /* Build up entries in the ARGS array, compute the size of the
2374 arguments into ARGS_SIZE, etc. */
2375 initialize_argument_information (num_actuals, args, &args_size,
2376 n_named_args, actparms, fndecl,
2377 &args_so_far, reg_parm_stack_space,
2378 &old_stack_level, &old_pending_adj,
2379 &must_preallocate, &flags);
2381 if (args_size.var)
2383 /* If this function requires a variable-sized argument list, don't
2384 try to make a cse'able block for this call. We may be able to
2385 do this eventually, but it is too complicated to keep track of
2386 what insns go in the cse'able block and which don't. */
2388 flags &= ~ECF_LIBCALL_BLOCK;
2389 must_preallocate = 1;
2392 /* Now make final decision about preallocating stack space. */
2393 must_preallocate = finalize_must_preallocate (must_preallocate,
2394 num_actuals, args,
2395 &args_size);
2397 /* If the structure value address will reference the stack pointer, we
2398 must stabilize it. We don't need to do this if we know that we are
2399 not going to adjust the stack pointer in processing this call. */
2401 if (structure_value_addr
2402 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2403 || reg_mentioned_p (virtual_outgoing_args_rtx,
2404 structure_value_addr))
2405 && (args_size.var
2406 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2407 structure_value_addr = copy_to_reg (structure_value_addr);
2409 /* Tail calls can make things harder to debug, and we've traditionally
2410 pushed these optimizations into -O2. Don't try if we're already
2411 expanding a call, as that means we're an argument. Don't try if
2412 there are cleanups, as we know there's code to follow the call.
2414 If rtx_equal_function_value_matters is false, that means we've
2415 finished with regular parsing, which means that some of the
2416 machinery we use to generate tail-calls is no longer in place.
2417 This is most often true of sjlj-exceptions, which we couldn't
2418 tail-call to anyway. */
2420 if (currently_expanding_call++ != 0
2421 || !flag_optimize_sibling_calls
2422 || !rtx_equal_function_value_matters
2423 || any_pending_cleanups (1)
2424 || args_size.var)
2425 try_tail_call = try_tail_recursion = 0;
2427 /* Tail recursion fails when we are not dealing with a recursive call. */
2428 if (!try_tail_recursion
2429 || TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
2430 || TREE_OPERAND (TREE_OPERAND (exp, 0), 0) != current_function_decl)
2431 try_tail_recursion = 0;
2433 /* Other reasons for the tail call optimization to fail. */
2434 if (
2435 #ifdef HAVE_sibcall_epilogue
2436 !HAVE_sibcall_epilogue
2437 #else
2439 #endif
2440 || !try_tail_call
2441 /* Doing sibling call optimization needs some work, since
2442 structure_value_addr can be allocated on the stack.
2443 It does not seem worth the effort since few optimizable
2444 sibling calls will return a structure. */
2445 || structure_value_addr != NULL_RTX
2446 /* Check whether the target is able to optimize the call
2447 into a sibcall. */
2448 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2449 /* Functions that do not return exactly once may not be sibcall
2450 optimized. */
2451 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2452 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
2453 /* If this function requires more stack slots than the current
2454 function, we cannot change it into a sibling call. */
2455 || args_size.constant > current_function_args_size
2456 /* If the callee pops its own arguments, then it must pop exactly
2457 the same number of arguments as the current function. */
2458 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2459 != RETURN_POPS_ARGS (current_function_decl,
2460 TREE_TYPE (current_function_decl),
2461 current_function_args_size))
2462 try_tail_call = 0;
2464 if (try_tail_call || try_tail_recursion)
2466 int end, inc;
2467 actparms = NULL_TREE;
2468 /* Ok, we're going to give the tail call the old college try.
2469 This means we're going to evaluate the function arguments
2470 up to three times. There are two degrees of badness we can
2471 encounter, those that can be unsaved and those that can't.
2472 (See unsafe_for_reeval commentary for details.)
2474 Generate a new argument list. Pass safe arguments through
2475 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2476 For hard badness, evaluate them now and put their resulting
2477 rtx in a temporary VAR_DECL.
2479 initialize_argument_information has ordered the array for the
2480 order to be pushed, and we must remember this when reconstructing
2481 the original argument order. */
2483 if (PUSH_ARGS_REVERSED)
2485 inc = 1;
2486 i = 0;
2487 end = num_actuals;
2489 else
2491 inc = -1;
2492 i = num_actuals - 1;
2493 end = -1;
2496 for (; i != end; i += inc)
2498 switch (unsafe_for_reeval (args[i].tree_value))
2500 case 0: /* Safe. */
2501 break;
2503 case 1: /* Mildly unsafe. */
2504 args[i].tree_value = unsave_expr (args[i].tree_value);
2505 break;
2507 case 2: /* Wildly unsafe. */
2509 tree var = build_decl (VAR_DECL, NULL_TREE,
2510 TREE_TYPE (args[i].tree_value));
2511 SET_DECL_RTL (var,
2512 expand_expr (args[i].tree_value, NULL_RTX,
2513 VOIDmode, EXPAND_NORMAL));
2514 args[i].tree_value = var;
2516 break;
2518 default:
2519 abort ();
2521 /* We need to build actparms for optimize_tail_recursion. We can
2522 safely trash away TREE_PURPOSE, since it is unused by this
2523 function. */
2524 if (try_tail_recursion)
2525 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2527 /* Expanding one of those dangerous arguments could have added
2528 cleanups, but otherwise give it a whirl. */
2529 if (any_pending_cleanups (1))
2530 try_tail_call = try_tail_recursion = 0;
2533 /* Generate a tail recursion sequence when calling ourselves. */
2535 if (try_tail_recursion)
2537 /* We want to emit any pending stack adjustments before the tail
2538 recursion "call". That way we know any adjustment after the tail
2539 recursion call can be ignored if we indeed use the tail recursion
2540 call expansion. */
2541 int save_pending_stack_adjust = pending_stack_adjust;
2542 int save_stack_pointer_delta = stack_pointer_delta;
2544 /* Emit any queued insns now; otherwise they would end up in
2545 only one of the alternates. */
2546 emit_queue ();
2548 /* Use a new sequence to hold any RTL we generate. We do not even
2549 know if we will use this RTL yet. The final decision can not be
2550 made until after RTL generation for the entire function is
2551 complete. */
2552 start_sequence ();
2553 /* If expanding any of the arguments creates cleanups, we can't
2554 do a tailcall. So, we'll need to pop the pending cleanups
2555 list. If, however, all goes well, and there are no cleanups
2556 then the call to expand_start_target_temps will have no
2557 effect. */
2558 expand_start_target_temps ();
2559 if (optimize_tail_recursion (actparms, get_last_insn ()))
2561 if (any_pending_cleanups (1))
2562 try_tail_call = try_tail_recursion = 0;
2563 else
2564 tail_recursion_insns = get_insns ();
2566 expand_end_target_temps ();
2567 end_sequence ();
2569 /* Restore the original pending stack adjustment for the sibling and
2570 normal call cases below. */
2571 pending_stack_adjust = save_pending_stack_adjust;
2572 stack_pointer_delta = save_stack_pointer_delta;
2575 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2577 /* A fork duplicates the profile information, and an exec discards
2578 it. We can't rely on fork/exec to be paired. So write out the
2579 profile information we have gathered so far, and clear it. */
2580 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2581 is subject to race conditions, just as with multithreaded
2582 programs. */
2584 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__gcov_flush"),
2585 LCT_ALWAYS_RETURN,
2586 VOIDmode, 0);
2589 /* Ensure current function's preferred stack boundary is at least
2590 what we need. We don't have to increase alignment for recursive
2591 functions. */
2592 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2593 && fndecl != current_function_decl)
2594 cfun->preferred_stack_boundary = preferred_stack_boundary;
2596 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2598 function_call_count++;
2600 /* We want to make two insn chains; one for a sibling call, the other
2601 for a normal call. We will select one of the two chains after
2602 initial RTL generation is complete. */
2603 for (pass = 0; pass < 2; pass++)
2605 int sibcall_failure = 0;
2606 /* We want to emit any pending stack adjustments before the sibling
2607 call sequence. That way we know any adjustment made after the
2608 sibling call can be ignored if we indeed use the sibling call
2609 expansion. */
2610 int save_pending_stack_adjust = 0;
2611 int save_stack_pointer_delta = 0;
2612 rtx insns;
2613 rtx before_call, next_arg_reg;
2615 if (pass == 0)
2617 if (! try_tail_call)
2618 continue;
2620 /* Emit any queued insns now; otherwise they would end up in
2621 only one of the alternates. */
2622 emit_queue ();
2624 /* State variables we need to save and restore between
2625 iterations. */
2626 save_pending_stack_adjust = pending_stack_adjust;
2627 save_stack_pointer_delta = stack_pointer_delta;
2629 if (pass)
2630 flags &= ~ECF_SIBCALL;
2631 else
2632 flags |= ECF_SIBCALL;
2634 /* Other state variables that we must reinitialize each time
2635 through the loop (that are not initialized by the loop itself). */
2636 argblock = 0;
2637 call_fusage = 0;
2639 /* Start a new sequence for the normal call case.
2641 From this point on, if the sibling call fails, we want to set
2642 sibcall_failure instead of continuing the loop. */
2643 start_sequence ();
2645 if (pass == 0)
2647 /* We know at this point that there are not currently any
2648 pending cleanups. If, however, in the process of evaluating
2649 the arguments we were to create some, we'll need to be
2650 able to get rid of them. */
2651 expand_start_target_temps ();
2654 /* Don't let pending stack adjusts add up to too much.
2655 Also, do all pending adjustments now if there is any chance
2656 this might be a call to alloca or if we are expanding a sibling
2657 call sequence or if we are calling a function that is to return
2658 with stack pointer depressed. */
2659 if (pending_stack_adjust >= 32
2660 || (pending_stack_adjust > 0
2661 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2662 || pass == 0)
2663 do_pending_stack_adjust ();
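/* On the sibcall pass every pending adjustment is flushed
   unconditionally, since the sibling call sequence is generated (and
   possibly discarded) as a self-contained unit and so must start from
   a settled stack state.  */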
2665 /* When calling a const function, we must pop the stack args right away,
2666 so that the pop is deleted or moved with the call. */
2667 if (pass && (flags & ECF_LIBCALL_BLOCK))
2668 NO_DEFER_POP;
2670 #ifdef FINAL_REG_PARM_STACK_SPACE
2671 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2672 args_size.var);
2673 #endif
2674 /* Precompute any arguments as needed. */
2675 if (pass)
2676 precompute_arguments (flags, num_actuals, args);
2678 /* Now we are about to start emitting insns that can be deleted
2679 if a libcall is deleted. */
2680 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2681 start_sequence ();
2683 adjusted_args_size = args_size;
2684 /* Compute the actual size of the argument block required. The variable
2685 and constant sizes must be combined, the size may have to be rounded,
2686 and there may be a minimum required size. When generating a sibcall
2687 pattern, do not round up, since we'll be re-using whatever space our
2688 caller provided. */
2689 unadjusted_args_size
2690 = compute_argument_block_size (reg_parm_stack_space,
2691 &adjusted_args_size,
2692 (pass == 0 ? 0
2693 : preferred_stack_boundary));
2695 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2697 /* The argument block when performing a sibling call is the
2698 incoming argument block. */
2699 if (pass == 0)
2701 argblock = virtual_incoming_args_rtx;
2702 argblock
2703 #ifdef STACK_GROWS_DOWNWARD
2704 = plus_constant (argblock, current_function_pretend_args_size);
2705 #else
2706 = plus_constant (argblock, -current_function_pretend_args_size);
2707 #endif
2708 stored_args_map = sbitmap_alloc (args_size.constant);
2709 sbitmap_zero (stored_args_map);
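/* Each bit of stored_args_map stands for one byte of the incoming
   argument area; check_sibcall_argument_overlap sets the bits as
   outgoing sibcall arguments overwrite those bytes.  */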
2712 /* If we have no actual push instructions, or shouldn't use them,
2713 make space for all args right now. */
2714 else if (adjusted_args_size.var != 0)
2716 if (old_stack_level == 0)
2718 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2719 old_pending_adj = pending_stack_adjust;
2720 pending_stack_adjust = 0;
2721 /* stack_arg_under_construction says whether a stack arg is
2722 being constructed at the old stack level. Pushing the stack
2723 gets a clean outgoing argument block. */
2724 old_stack_arg_under_construction = stack_arg_under_construction;
2725 stack_arg_under_construction = 0;
2727 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2729 else
2731 /* Note that we must go through the motions of allocating an argument
2732 block even if the size is zero because we may be storing args
2733 in the area reserved for register arguments, which may be part of
2734 the stack frame. */
2736 int needed = adjusted_args_size.constant;
2738 /* Store the maximum argument space used. It will be pushed by
2739 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2740 checking). */
2742 if (needed > current_function_outgoing_args_size)
2743 current_function_outgoing_args_size = needed;
2745 if (must_preallocate)
2747 if (ACCUMULATE_OUTGOING_ARGS)
2749 /* Since the stack pointer will never be pushed, it is
2750 possible for the evaluation of a parm to clobber
2751 something we have already written to the stack.
2752 Since most function calls on RISC machines do not use
2753 the stack, this is uncommon, but must work correctly.
2755 Therefore, we save any area of the stack that was already
2756 written and that we are using. Here we set up to do this
2757 by making a new stack usage map from the old one. The
2758 actual save will be done by store_one_arg.
2760 Another approach might be to try to reorder the argument
2761 evaluations to avoid this conflicting stack usage. */
2763 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2764 /* Since we will be writing into the entire argument area,
2765 the map must be allocated for its entire size, not just
2766 the part that is the responsibility of the caller. */
2767 needed += reg_parm_stack_space;
2768 #endif
2770 #ifdef ARGS_GROW_DOWNWARD
2771 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2772 needed + 1);
2773 #else
2774 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2775 needed);
2776 #endif
2777 stack_usage_map
2778 = (char *) alloca (highest_outgoing_arg_in_use);
2780 if (initial_highest_arg_in_use)
2781 memcpy (stack_usage_map, initial_stack_usage_map,
2782 initial_highest_arg_in_use);
2784 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2785 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2786 (highest_outgoing_arg_in_use
2787 - initial_highest_arg_in_use));
2788 needed = 0;
2790 /* The address of the outgoing argument list must not be
2791 copied to a register here, because argblock would be left
2792 pointing to the wrong place after the call to
2793 allocate_dynamic_stack_space below. */
2795 argblock = virtual_outgoing_args_rtx;
2797 else
2799 if (inhibit_defer_pop == 0)
2801 /* Try to reuse some or all of the pending_stack_adjust
2802 to get this space. */
2803 needed
2804 = (combine_pending_stack_adjustment_and_call
2805 (unadjusted_args_size,
2806 &adjusted_args_size,
2807 preferred_unit_stack_boundary));
2809 /* combine_pending_stack_adjustment_and_call computes
2810 an adjustment before the arguments are allocated.
2811 Account for them and see whether or not the stack
2812 needs to go up or down. */
2813 needed = unadjusted_args_size - needed;
2815 if (needed < 0)
2817 /* We're releasing stack space. */
2818 /* ??? We can avoid any adjustment at all if we're
2819 already aligned. FIXME. */
2820 pending_stack_adjust = -needed;
2821 do_pending_stack_adjust ();
2822 needed = 0;
2824 else
2825 /* We need to allocate space. We'll do that in
2826 push_block below. */
2827 pending_stack_adjust = 0;
2830 /* Special case this because overhead of `push_block' in
2831 this case is non-trivial. */
2832 if (needed == 0)
2833 argblock = virtual_outgoing_args_rtx;
2834 else
2835 argblock = push_block (GEN_INT (needed), 0, 0);
2837 /* We only really need to call `copy_to_reg' in the case
2838 where push insns are going to be used to pass ARGBLOCK
2839 to a function call in ARGS. In that case, the stack
2840 pointer changes value from the allocation point to the
2841 call point, and hence the value of
2842 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2843 as well always do it. */
2844 argblock = copy_to_reg (argblock);
2846 /* The save/restore code in store_one_arg handles all
2847 cases except one: a constructor call (including a C
2848 function returning a BLKmode struct) to initialize
2849 an argument. */
2850 if (stack_arg_under_construction)
2852 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2853 rtx push_size = GEN_INT (reg_parm_stack_space
2854 + adjusted_args_size.constant);
2855 #else
2856 rtx push_size = GEN_INT (adjusted_args_size.constant);
2857 #endif
2858 if (old_stack_level == 0)
2860 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2861 NULL_RTX);
2862 old_pending_adj = pending_stack_adjust;
2863 pending_stack_adjust = 0;
2864 /* stack_arg_under_construction says whether a stack
2865 arg is being constructed at the old stack level.
2866 Pushing the stack gets a clean outgoing argument
2867 block. */
2868 old_stack_arg_under_construction
2869 = stack_arg_under_construction;
2870 stack_arg_under_construction = 0;
2871 /* Make a new map for the new argument list. */
2872 stack_usage_map = (char *)
2873 alloca (highest_outgoing_arg_in_use);
2874 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2875 highest_outgoing_arg_in_use = 0;
2877 allocate_dynamic_stack_space (push_size, NULL_RTX,
2878 BITS_PER_UNIT);
2880 /* If argument evaluation might modify the stack pointer,
2881 copy the address of the argument list to a register. */
2882 for (i = 0; i < num_actuals; i++)
2883 if (args[i].pass_on_stack)
2885 argblock = copy_addr_to_reg (argblock);
2886 break;
2892 compute_argument_addresses (args, argblock, num_actuals);
2894 /* If we push args individually in reverse order, perform stack alignment
2895 before the first push (the last arg). */
2896 if (PUSH_ARGS_REVERSED && argblock == 0
2897 && adjusted_args_size.constant != unadjusted_args_size)
2899 /* When the stack adjustment is pending, we get better code
2900 by combining the adjustments. */
2901 if (pending_stack_adjust
2902 && ! (flags & ECF_LIBCALL_BLOCK)
2903 && ! inhibit_defer_pop)
2905 pending_stack_adjust
2906 = (combine_pending_stack_adjustment_and_call
2907 (unadjusted_args_size,
2908 &adjusted_args_size,
2909 preferred_unit_stack_boundary));
2910 do_pending_stack_adjust ();
2912 else if (argblock == 0)
2913 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2914 - unadjusted_args_size));
2916 /* Now that the stack is properly aligned, pops can't safely
2917 be deferred during the evaluation of the arguments. */
2918 NO_DEFER_POP;
2920 funexp = rtx_for_function_call (fndecl, exp);
2922 /* Figure out the register where the value, if any, will come back. */
2923 valreg = 0;
2924 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2925 && ! structure_value_addr)
2927 if (pcc_struct_value)
2928 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2929 fndecl, (pass == 0));
2930 else
2931 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2934 /* Precompute all register parameters. It isn't safe to compute anything
2935 once we have started filling any specific hard regs. */
2936 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2938 #ifdef REG_PARM_STACK_SPACE
2939 /* Save the fixed argument area if it's part of the caller's frame and
2940 is clobbered by argument setup for this call. */
2941 if (ACCUMULATE_OUTGOING_ARGS && pass)
2942 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2943 &low_to_save, &high_to_save);
2944 #endif
2946 /* Now store (and compute if necessary) all non-register parms.
2947 These come before register parms, since they can require block-moves,
2948 which could clobber the registers used for register parms.
2949 Parms which have partial registers are not stored here,
2950 but we do preallocate space here if they want that. */
2952 for (i = 0; i < num_actuals; i++)
2953 if (args[i].reg == 0 || args[i].pass_on_stack)
2955 rtx before_arg = get_last_insn ();
2957 if (store_one_arg (&args[i], argblock, flags,
2958 adjusted_args_size.var != 0,
2959 reg_parm_stack_space)
2960 || (pass == 0
2961 && check_sibcall_argument_overlap (before_arg,
2962 &args[i])))
2963 sibcall_failure = 1;
2966 /* If we have a parm that is passed in registers but not in memory
2967 and whose alignment does not permit a direct copy into registers,
2968 make a group of pseudos that correspond to each register that we
2969 will later fill. */
2970 if (STRICT_ALIGNMENT)
2971 store_unaligned_arguments_into_pseudos (args, num_actuals);
2973 /* Now store any partially-in-registers parm.
2974 This is the last place a block-move can happen. */
2975 if (reg_parm_seen)
2976 for (i = 0; i < num_actuals; i++)
2977 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2979 rtx before_arg = get_last_insn ();
2981 if (store_one_arg (&args[i], argblock, flags,
2982 adjusted_args_size.var != 0,
2983 reg_parm_stack_space)
2984 || (pass == 0
2985 && check_sibcall_argument_overlap (before_arg,
2986 &args[i])))
2987 sibcall_failure = 1;
2990 /* If we pushed args in forward order, perform stack alignment
2991 after pushing the last arg. */
2992 if (!PUSH_ARGS_REVERSED && argblock == 0)
2993 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2994 - unadjusted_args_size));
2996 /* If register arguments require space on the stack and stack space
2997 was not preallocated, allocate stack space here for arguments
2998 passed in registers. */
2999 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3000 if (!ACCUMULATE_OUTGOING_ARGS
3001 && must_preallocate == 0 && reg_parm_stack_space > 0)
3002 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3003 #endif
3005 /* Pass the function the address in which to return a
3006 structure value. */
3007 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3009 emit_move_insn (struct_value_rtx,
3010 force_reg (Pmode,
3011 force_operand (structure_value_addr,
3012 NULL_RTX)));
3014 if (GET_CODE (struct_value_rtx) == REG)
3015 use_reg (&call_fusage, struct_value_rtx);
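/* The USE ends up in the call insn's CALL_INSN_FUNCTION_USAGE, which
   keeps the struct-value register live up to the call itself.  */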
3018 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3019 reg_parm_seen, pass == 0);
3021 load_register_parameters (args, num_actuals, &call_fusage, flags);
3023 /* Perform postincrements before actually calling the function. */
3024 emit_queue ();
3026 /* Save a pointer to the last insn before the call, so that we can
3027 later safely search backwards to find the CALL_INSN. */
3028 before_call = get_last_insn ();
3030 /* Set up next argument register. For sibling calls on machines
3031 with register windows this should be the incoming register. */
3032 #ifdef FUNCTION_INCOMING_ARG
3033 if (pass == 0)
3034 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3035 void_type_node, 1);
3036 else
3037 #endif
3038 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3039 void_type_node, 1);
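/* Querying FUNCTION_ARG with VOIDmode and void_type_node is the
   convention for asking the backend about the register following the
   last real argument; on some targets emit_call_1 passes next_arg_reg
   as an operand of the call_pop patterns.  */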
3041 /* All arguments and registers used for the call must be set up by
3042 now! */
3044 /* Stack must be properly aligned now. */
3045 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3046 abort ();
3048 /* Generate the actual call instruction. */
3049 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3050 adjusted_args_size.constant, struct_value_size,
3051 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3052 flags, & args_so_far);
3054 /* Verify that we've deallocated all the stack we used. */
3055 if (pass
3056 && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
3057 abort ();
3059 /* If call is cse'able, make appropriate pair of reg-notes around it.
3060 Test valreg so we don't crash; may safely ignore `const'
3061 if return type is void. Disable for PARALLEL return values, because
3062 we have no way to move such values into a pseudo register. */
3063 if (pass && (flags & ECF_LIBCALL_BLOCK))
3065 rtx insns;
3067 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3069 insns = get_insns ();
3070 end_sequence ();
3071 emit_insn (insns);
3073 else
3075 rtx note = 0;
3076 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3078 /* Mark the return value as a pointer if needed. */
3079 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3080 mark_reg_pointer (temp,
3081 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3083 /* Construct an "equal form" for the value which mentions all the
3084 arguments in order as well as the function name. */
3085 for (i = 0; i < num_actuals; i++)
3086 note = gen_rtx_EXPR_LIST (VOIDmode,
3087 args[i].initial_value, note);
3088 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3090 insns = get_insns ();
3091 end_sequence ();
3093 if (flags & ECF_PURE)
3094 note = gen_rtx_EXPR_LIST (VOIDmode,
3095 gen_rtx_USE (VOIDmode,
3096 gen_rtx_MEM (BLKmode,
3097 gen_rtx_SCRATCH (VOIDmode))),
3098 note);
3100 emit_libcall_block (insns, temp, valreg, note);
3102 valreg = temp;
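/* emit_libcall_block brackets the saved sequence with REG_LIBCALL and
   REG_RETVAL notes and attaches NOTE as a REG_EQUAL note, which is
   what lets later passes treat (and CSE) the whole block as a unit.  */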
3105 else if (pass && (flags & ECF_MALLOC))
3107 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3108 rtx last, insns;
3110 /* The return value from a malloc-like function is a pointer. */
3111 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3112 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3114 emit_move_insn (temp, valreg);
3116 /* The return value from a malloc-like function can not alias
3117 anything else. */
3118 last = get_last_insn ();
3119 REG_NOTES (last) =
3120 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3122 /* Write out the sequence. */
3123 insns = get_insns ();
3124 end_sequence ();
3125 emit_insn (insns);
3126 valreg = temp;
3129 /* For calls to `setjmp', etc., inform flow.c it should complain
3130 if nonvolatile values are live. For functions that cannot return,
3131 inform flow that control does not fall through. */
3133 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3135 /* The barrier must be emitted
3136 immediately after the CALL_INSN. Some ports emit more
3137 than just a CALL_INSN above, so we must search for it here. */
3139 rtx last = get_last_insn ();
3140 while (GET_CODE (last) != CALL_INSN)
3142 last = PREV_INSN (last);
3143 /* There was no CALL_INSN? */
3144 if (last == before_call)
3145 abort ();
3148 emit_barrier_after (last);
3151 if (flags & ECF_LONGJMP)
3152 current_function_calls_longjmp = 1;
3154 /* If this function is returning into a memory location marked as
3155 readonly, it means it is initializing that location. But we normally
3156 treat functions as not clobbering such locations, so we need to
3157 specify that this one does. */
3158 if (target != 0 && GET_CODE (target) == MEM
3159 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3160 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3162 /* If value type not void, return an rtx for the value. */
3164 /* If there are cleanups to be called, don't use a hard reg as target.
3165 We need to double check this and see if it matters anymore. */
3166 if (any_pending_cleanups (1))
3168 if (target && REG_P (target)
3169 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3170 target = 0;
3171 sibcall_failure = 1;
3174 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3175 || ignore)
3176 target = const0_rtx;
3177 else if (structure_value_addr)
3179 if (target == 0 || GET_CODE (target) != MEM)
3181 target
3182 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3183 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3184 structure_value_addr));
3185 set_mem_attributes (target, exp, 1);
3188 else if (pcc_struct_value)
3190 /* This is the special C++ case where we need to
3191 know what the true target was. We take care to
3192 never use this value more than once in one expression. */
3193 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3194 copy_to_reg (valreg));
3195 set_mem_attributes (target, exp, 1);
3197 /* Handle calls that return values in multiple non-contiguous locations.
3198 The Irix 6 ABI has examples of this. */
3199 else if (GET_CODE (valreg) == PARALLEL)
3201 if (target == 0)
3203 /* This will only be assigned once, so it can be readonly. */
3204 tree nt = build_qualified_type (TREE_TYPE (exp),
3205 (TYPE_QUALS (TREE_TYPE (exp))
3206 | TYPE_QUAL_CONST));
3208 target = assign_temp (nt, 0, 1, 1);
3209 preserve_temp_slots (target);
3212 if (! rtx_equal_p (target, valreg))
3213 emit_group_store (target, valreg,
3214 int_size_in_bytes (TREE_TYPE (exp)));
3216 /* We can not support sibling calls for this case. */
3217 sibcall_failure = 1;
3219 else if (target
3220 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3221 && GET_MODE (target) == GET_MODE (valreg))
3223 /* TARGET and VALREG cannot be equal at this point because the
3224 latter would not have REG_FUNCTION_VALUE_P true, while the
3225 former would if it were referring to the same register.
3227 If they refer to the same register, this move will be a no-op,
3228 except when function inlining is being done. */
3229 emit_move_insn (target, valreg);
3231 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3233 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3235 /* We can not support sibling calls for this case. */
3236 sibcall_failure = 1;
3238 else
3239 target = copy_to_reg (valreg);
3241 #ifdef PROMOTE_FUNCTION_RETURN
3242 /* If we promoted this return value, make the proper SUBREG. TARGET
3243 might be const0_rtx here, so be careful. */
3244 if (GET_CODE (target) == REG
3245 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3246 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3248 tree type = TREE_TYPE (exp);
3249 int unsignedp = TREE_UNSIGNED (type);
3250 int offset = 0;
3252 /* If we don't promote as expected, something is wrong. */
3253 if (GET_MODE (target)
3254 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3255 abort ();
3257 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3258 && GET_MODE_SIZE (GET_MODE (target))
3259 > GET_MODE_SIZE (TYPE_MODE (type)))
3261 offset = GET_MODE_SIZE (GET_MODE (target))
3262 - GET_MODE_SIZE (TYPE_MODE (type));
3263 if (! BYTES_BIG_ENDIAN)
3264 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3265 else if (! WORDS_BIG_ENDIAN)
3266 offset %= UNITS_PER_WORD;
3268 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3269 SUBREG_PROMOTED_VAR_P (target) = 1;
3270 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3272 #endif
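/* For instance, a signed char returned promoted in an SImode register
   on a fully big-endian 32-bit target yields offset == 4 - 1 == 3, so
   the SUBREG picks out the low-order byte at byte offset 3.  */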
3274 /* If size of args is variable or this was a constructor call for a stack
3275 argument, restore saved stack-pointer value. */
3277 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3279 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3280 pending_stack_adjust = old_pending_adj;
3281 stack_arg_under_construction = old_stack_arg_under_construction;
3282 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3283 stack_usage_map = initial_stack_usage_map;
3284 sibcall_failure = 1;
3286 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3288 #ifdef REG_PARM_STACK_SPACE
3289 if (save_area)
3291 restore_fixed_argument_area (save_area, argblock,
3292 high_to_save, low_to_save);
3294 #endif
3296 /* If we saved any argument areas, restore them. */
3297 for (i = 0; i < num_actuals; i++)
3298 if (args[i].save_area)
3300 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3301 rtx stack_area
3302 = gen_rtx_MEM (save_mode,
3303 memory_address (save_mode,
3304 XEXP (args[i].stack_slot, 0)));
3306 if (save_mode != BLKmode)
3307 emit_move_insn (stack_area, args[i].save_area);
3308 else
3309 emit_block_move (stack_area, args[i].save_area,
3310 GEN_INT (args[i].size.constant),
3311 BLOCK_OP_CALL_PARM);
3314 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3315 stack_usage_map = initial_stack_usage_map;
3318 /* If this was alloca, record the new stack level for nonlocal gotos.
3319 Check for the handler slots since we might not have a save area
3320 for non-local gotos. */
3322 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3323 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3325 /* Free up storage we no longer need. */
3326 for (i = 0; i < num_actuals; ++i)
3327 if (args[i].aligned_regs)
3328 free (args[i].aligned_regs);
3330 if (pass == 0)
3332 /* Undo the fake expand_start_target_temps we did earlier. If
3333 there had been any cleanups created, we've already set
3334 sibcall_failure. */
3335 expand_end_target_temps ();
3338 insns = get_insns ();
3339 end_sequence ();
3341 if (pass == 0)
3343 tail_call_insns = insns;
3345 /* Restore the pending stack adjustment now that we have
3346 finished generating the sibling call sequence. */
3348 pending_stack_adjust = save_pending_stack_adjust;
3349 stack_pointer_delta = save_stack_pointer_delta;
3351 /* Prepare arg structure for next iteration. */
3352 for (i = 0; i < num_actuals; i++)
3354 args[i].value = 0;
3355 args[i].aligned_regs = 0;
3356 args[i].stack = 0;
3359 sbitmap_free (stored_args_map);
3361 else
3362 normal_call_insns = insns;
3364 /* If something prevents making this a sibling call,
3365 zero out the sequence. */
3366 if (sibcall_failure)
3367 tail_call_insns = NULL_RTX;
3370 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3371 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3372 can happen if the arguments to this function call an inline
3373 function whose expansion contains another CALL_PLACEHOLDER.
3375 If there are any C_Ps in any of these sequences, replace them
3376 with their normal call. */
3378 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3379 if (GET_CODE (insn) == CALL_INSN
3380 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3381 replace_call_placeholder (insn, sibcall_use_normal);
3383 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3384 if (GET_CODE (insn) == CALL_INSN
3385 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3386 replace_call_placeholder (insn, sibcall_use_normal);
3388 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3389 if (GET_CODE (insn) == CALL_INSN
3390 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3391 replace_call_placeholder (insn, sibcall_use_normal);
3393 /* If this was a potential tail recursion site, then emit a
3394 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3395 One of them will be selected later. */
3396 if (tail_recursion_insns || tail_call_insns)
3398 /* The tail recursion label must be kept around. We could expose
3399 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3400 and makes determining true tail recursion sites difficult.
3402 So we set LABEL_PRESERVE_P here, then clear it when we select
3403 one of the call sequences after rtl generation is complete. */
3404 if (tail_recursion_insns)
3405 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3406 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3407 tail_call_insns,
3408 tail_recursion_insns,
3409 tail_recursion_label));
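/* The placeholder is resolved after RTL generation is complete, when
   optimize_sibling_and_tail_recursive_calls picks one of the recorded
   sequences and discards the others.  */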
3411 else
3412 emit_insn (normal_call_insns);
3414 currently_expanding_call--;
3416 /* If this function returns with the stack pointer depressed, ensure
3417 this block saves and restores the stack pointer, show it was
3418 changed, and adjust for any outgoing arg space. */
3419 if (flags & ECF_SP_DEPRESSED)
3421 clear_pending_stack_adjust ();
3422 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3423 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3424 save_stack_pointer ();
3427 return target;
3430 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3431 The RETVAL parameter specifies whether the return value needs to be saved;
3432 the other parameters are documented in the emit_library_call function below. */
3434 static rtx
3435 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3436 int retval;
3437 rtx orgfun;
3438 rtx value;
3439 enum libcall_type fn_type;
3440 enum machine_mode outmode;
3441 int nargs;
3442 va_list p;
3444 /* Total size in bytes of all the stack-parms scanned so far. */
3445 struct args_size args_size;
3446 /* Size of arguments before any adjustments (such as rounding). */
3447 struct args_size original_args_size;
3448 int argnum;
3449 rtx fun;
3450 int inc;
3451 int count;
3452 struct args_size alignment_pad;
3453 rtx argblock = 0;
3454 CUMULATIVE_ARGS args_so_far;
3455 struct arg
3457 rtx value;
3458 enum machine_mode mode;
3459 rtx reg;
3460 int partial;
3461 struct args_size offset;
3462 struct args_size size;
3463 rtx save_area;
3465 struct arg *argvec;
3466 int old_inhibit_defer_pop = inhibit_defer_pop;
3467 rtx call_fusage = 0;
3468 rtx mem_value = 0;
3469 rtx valreg;
3470 int pcc_struct_value = 0;
3471 int struct_value_size = 0;
3472 int flags;
3473 int reg_parm_stack_space = 0;
3474 int needed;
3475 rtx before_call;
3476 tree tfom; /* type_for_mode (outmode, 0) */
3478 #ifdef REG_PARM_STACK_SPACE
3479 /* Define the boundary of the register parm stack space that needs to be
3480 saved, if any. */
3481 int low_to_save = -1, high_to_save = 0;
3482 rtx save_area = 0; /* Place that it is saved. */
3483 #endif
3485 /* Size of the stack reserved for parameter registers. */
3486 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3487 char *initial_stack_usage_map = stack_usage_map;
3489 #ifdef REG_PARM_STACK_SPACE
3490 #ifdef MAYBE_REG_PARM_STACK_SPACE
3491 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3492 #else
3493 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3494 #endif
3495 #endif
3497 /* By default, library functions cannot throw. */
3498 flags = ECF_NOTHROW;
3500 switch (fn_type)
3502 case LCT_NORMAL:
3503 break;
3504 case LCT_CONST:
3505 flags |= ECF_CONST;
3506 break;
3507 case LCT_PURE:
3508 flags |= ECF_PURE;
3509 break;
3510 case LCT_CONST_MAKE_BLOCK:
3511 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3512 break;
3513 case LCT_PURE_MAKE_BLOCK:
3514 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3515 break;
3516 case LCT_NORETURN:
3517 flags |= ECF_NORETURN;
3518 break;
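/* Note that LCT_THROW below uses plain assignment, not "|=": it drops
   the ECF_NOTHROW default set above, so the libcall is allowed to
   unwind.  */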
3519 case LCT_THROW:
3520 flags = ECF_NORETURN;
3521 break;
3522 case LCT_ALWAYS_RETURN:
3523 flags = ECF_ALWAYS_RETURN;
3524 break;
3525 case LCT_RETURNS_TWICE:
3526 flags = ECF_RETURNS_TWICE;
3527 break;
3529 fun = orgfun;
3531 /* Ensure current function's preferred stack boundary is at least
3532 what we need. */
3533 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3534 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3536 /* If this kind of value comes back in memory,
3537 decide where in memory it should come back. */
3538 if (outmode != VOIDmode)
3540 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3541 if (aggregate_value_p (tfom))
3543 #ifdef PCC_STATIC_STRUCT_RETURN
3544 rtx pointer_reg
3545 = hard_function_value (build_pointer_type (tfom), 0, 0);
3546 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3547 pcc_struct_value = 1;
3548 if (value == 0)
3549 value = gen_reg_rtx (outmode);
3550 #else /* not PCC_STATIC_STRUCT_RETURN */
3551 struct_value_size = GET_MODE_SIZE (outmode);
3552 if (value != 0 && GET_CODE (value) == MEM)
3553 mem_value = value;
3554 else
3555 mem_value = assign_temp (tfom, 0, 1, 1);
3556 #endif
3557 /* This call returns a big structure. */
3558 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3561 else
3562 tfom = void_type_node;
3564 /* ??? Unfinished: must pass the memory address as an argument. */
3566 /* Copy all the libcall-arguments out of the varargs data
3567 and into a vector ARGVEC.
3569 Compute how to pass each argument. We only support a very small subset
3570 of the full argument passing conventions to limit complexity here since
3571 library functions shouldn't have many args. */
3573 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3574 memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
3576 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3577 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3578 #else
3579 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3580 #endif
3582 args_size.constant = 0;
3583 args_size.var = 0;
3585 count = 0;
3587 /* Now we are about to start emitting insns that can be deleted
3588 if a libcall is deleted. */
3589 if (flags & ECF_LIBCALL_BLOCK)
3590 start_sequence ();
3592 push_temp_slots ();
3594 /* If there's a structure value address to be passed,
3595 either pass it in the special place, or pass it as an extra argument. */
3596 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3598 rtx addr = XEXP (mem_value, 0);
3599 nargs++;
3601 /* Make sure it is a reasonable operand for a move or push insn. */
3602 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3603 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3604 addr = force_operand (addr, NULL_RTX);
3606 argvec[count].value = addr;
3607 argvec[count].mode = Pmode;
3608 argvec[count].partial = 0;
3610 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3611 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3612 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3613 abort ();
3614 #endif
3616 locate_and_pad_parm (Pmode, NULL_TREE,
3617 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3619 #else
3620 argvec[count].reg != 0,
3621 #endif
3622 NULL_TREE, &args_size, &argvec[count].offset,
3623 &argvec[count].size, &alignment_pad);
3625 if (argvec[count].reg == 0 || argvec[count].partial != 0
3626 || reg_parm_stack_space > 0)
3627 args_size.constant += argvec[count].size.constant;
3629 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3631 count++;
3634 for (; count < nargs; count++)
3636 rtx val = va_arg (p, rtx);
3637 enum machine_mode mode = va_arg (p, enum machine_mode);
3639 /* We cannot convert the arg value to the mode the library wants here;
3640 must do it earlier where we know the signedness of the arg. */
3641 if (mode == BLKmode
3642 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3643 abort ();
3645 /* On some machines, there's no way to pass a float to a library fcn.
3646 Pass it as a double instead. */
3647 #ifdef LIBGCC_NEEDS_DOUBLE
3648 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3649 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3650 #endif
3652 /* There's no need to call protect_from_queue, because
3653 either emit_move_insn or emit_push_insn will do that. */
3655 /* Make sure it is a reasonable operand for a move or push insn. */
3656 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3657 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3658 val = force_operand (val, NULL_RTX);
3660 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3661 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3663 rtx slot;
3664 int must_copy = 1
3665 #ifdef FUNCTION_ARG_CALLEE_COPIES
3666 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3667 NULL_TREE, 1)
3668 #endif
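/* MUST_COPY is zero only when the target guarantees that the callee
   makes its own copy of an argument passed by reference.  */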
3671 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3672 functions, so we have to pretend this isn't such a function. */
3673 if (flags & ECF_LIBCALL_BLOCK)
3675 rtx insns = get_insns ();
3676 end_sequence ();
3677 emit_insn (insns);
3679 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3681 /* If this was a CONST function, it is now PURE since
3682 it now reads memory. */
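/* ??? ECF_CONST was already cleared by the blanket mask just above,
   so this test can never be true here.  */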
3683 if (flags & ECF_CONST)
3685 flags &= ~ECF_CONST;
3686 flags |= ECF_PURE;
3689 if (GET_MODE (val) == MEM && ! must_copy)
3690 slot = val;
3691 else if (must_copy)
3693 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3694 0, 1, 1);
3695 emit_move_insn (slot, val);
3697 else
3699 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3701 slot = gen_rtx_MEM (mode,
3702 expand_expr (build1 (ADDR_EXPR,
3703 build_pointer_type
3704 (type),
3705 make_tree (type, val)),
3706 NULL_RTX, VOIDmode, 0));
3709 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3710 gen_rtx_USE (VOIDmode, slot),
3711 call_fusage);
3712 if (must_copy)
3713 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3714 gen_rtx_CLOBBER (VOIDmode,
3715 slot),
3716 call_fusage);
3718 mode = Pmode;
3719 val = force_operand (XEXP (slot, 0), NULL_RTX);
3721 #endif
3723 argvec[count].value = val;
3724 argvec[count].mode = mode;
3726 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3728 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3729 argvec[count].partial
3730 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3731 #else
3732 argvec[count].partial = 0;
3733 #endif
3735 locate_and_pad_parm (mode, NULL_TREE,
3736 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3738 #else
3739 argvec[count].reg != 0,
3740 #endif
3741 NULL_TREE, &args_size, &argvec[count].offset,
3742 &argvec[count].size, &alignment_pad);
3744 if (argvec[count].size.var)
3745 abort ();
3747 if (reg_parm_stack_space == 0 && argvec[count].partial)
3748 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
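/* Illustrative example: with UNITS_PER_WORD == 4 and partial == 1,
   a 12-byte argument leaves 12 - 4 = 8 bytes on the stack.  */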
3750 if (argvec[count].reg == 0 || argvec[count].partial != 0
3751 || reg_parm_stack_space > 0)
3752 args_size.constant += argvec[count].size.constant;
3754 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3757 #ifdef FINAL_REG_PARM_STACK_SPACE
3758 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3759 args_size.var);
3760 #endif
3761 /* If this machine requires an external definition for library
3762 functions, write one out. */
3763 assemble_external_libcall (fun);
3765 original_args_size = args_size;
3766 args_size.constant = (((args_size.constant
3767 + stack_pointer_delta
3768 + STACK_BYTES - 1)
3769 / STACK_BYTES
3770 * STACK_BYTES)
3771 - stack_pointer_delta);
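/* Illustrative example: with STACK_BYTES == 16, stack_pointer_delta == 4
   and an incoming constant of 20, this computes
   ((20 + 4 + 15) / 16) * 16 - 4 = 28, so delta + size is again a
   multiple of STACK_BYTES.  */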
3773 args_size.constant = MAX (args_size.constant,
3774 reg_parm_stack_space);
3776 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3777 args_size.constant -= reg_parm_stack_space;
3778 #endif
3780 if (args_size.constant > current_function_outgoing_args_size)
3781 current_function_outgoing_args_size = args_size.constant;
3783 if (ACCUMULATE_OUTGOING_ARGS)
3785 /* Since the stack pointer will never be pushed, it is possible for
3786 the evaluation of a parm to clobber something we have already
3787 written to the stack. Since most function calls on RISC machines
3788 do not use the stack, this is uncommon, but must work correctly.
3790 Therefore, we save any area of the stack that was already written
3791 and that we are using. Here we set up to do this by making a new
3792 stack usage map from the old one.
3794 Another approach might be to try to reorder the argument
3795 evaluations to avoid this conflicting stack usage. */
3797 needed = args_size.constant;
3799 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3800 /* Since we will be writing into the entire argument area, the
3801 map must be allocated for its entire size, not just the part that
3802 is the responsibility of the caller. */
3803 needed += reg_parm_stack_space;
3804 #endif
3806 #ifdef ARGS_GROW_DOWNWARD
3807 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3808 needed + 1);
3809 #else
3810 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3811 needed);
3812 #endif
3813 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3815 if (initial_highest_arg_in_use)
3816 memcpy (stack_usage_map, initial_stack_usage_map,
3817 initial_highest_arg_in_use);
3819 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3820 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3821 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3822 needed = 0;
3824 /* We must be careful to use virtual regs before they're instantiated,
3825 and real regs afterwards. Loop optimization, for example, can create
3826 new libcalls after we've instantiated the virtual regs, and if we
3827 use virtuals anyway, they won't match the rtl patterns. */
3829 if (virtuals_instantiated)
3830 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3831 else
3832 argblock = virtual_outgoing_args_rtx;
3834 else
3836 if (!PUSH_ARGS)
3837 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3840 /* If we push args individually in reverse order, perform stack alignment
3841 before the first push (the last arg). */
3842 if (argblock == 0 && PUSH_ARGS_REVERSED)
3843 anti_adjust_stack (GEN_INT (args_size.constant
3844 - original_args_size.constant));
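/* Illustrative example: if rounding grew args_size.constant from 20
   to 32, we allocate the 12 bytes of padding before the first (i.e.
   last-argument) push, so the block ends up aligned.  */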
3846 if (PUSH_ARGS_REVERSED)
3848 inc = -1;
3849 argnum = nargs - 1;
3851 else
3853 inc = 1;
3854 argnum = 0;
3857 #ifdef REG_PARM_STACK_SPACE
3858 if (ACCUMULATE_OUTGOING_ARGS)
3860 /* The argument list is the property of the called routine and it
3861 may clobber it. If the fixed area has been used for previous
3862 parameters, we must save and restore it.
3864 Here we compute the boundary of the area that needs to be saved, if any. */
3866 #ifdef ARGS_GROW_DOWNWARD
3867 for (count = 0; count < reg_parm_stack_space + 1; count++)
3868 #else
3869 for (count = 0; count < reg_parm_stack_space; count++)
3870 #endif
3872 if (count >= highest_outgoing_arg_in_use
3873 || stack_usage_map[count] == 0)
3874 continue;
3876 if (low_to_save == -1)
3877 low_to_save = count;
3879 high_to_save = count;
3882 if (low_to_save >= 0)
3884 int num_to_save = high_to_save - low_to_save + 1;
3885 enum machine_mode save_mode
3886 = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3887 rtx stack_area;
3889 /* If we don't have the required alignment, we must do this in BLKmode. */
3890 if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3891 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3892 save_mode = BLKmode;
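/* Illustrative example: with an 8-byte save_mode the mask is 8 - 1 = 7
   (assuming BIGGEST_ALIGNMENT / UNITS_PER_WORD >= 8), so
   low_to_save == 4 is misaligned and forces the BLKmode path.  */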
3894 #ifdef ARGS_GROW_DOWNWARD
3895 stack_area = gen_rtx_MEM (save_mode,
3896 memory_address (save_mode,
3897 plus_constant (argblock,
3898 -high_to_save)));
3899 #else
3900 stack_area = gen_rtx_MEM (save_mode,
3901 memory_address (save_mode,
3902 plus_constant (argblock,
3903 low_to_save)));
3904 #endif
3905 if (save_mode == BLKmode)
3907 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3908 set_mem_align (save_area, PARM_BOUNDARY);
3909 emit_block_move (save_area, stack_area, GEN_INT (num_to_save),
3910 BLOCK_OP_CALL_PARM);
3912 else
3914 save_area = gen_reg_rtx (save_mode);
3915 emit_move_insn (save_area, stack_area);
3919 #endif
3921 /* Push the args that need to be pushed. */
3923 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3924 are to be pushed. */
3925 for (count = 0; count < nargs; count++, argnum += inc)
3927 enum machine_mode mode = argvec[argnum].mode;
3928 rtx val = argvec[argnum].value;
3929 rtx reg = argvec[argnum].reg;
3930 int partial = argvec[argnum].partial;
3931 int lower_bound = 0, upper_bound = 0, i;
3933 if (! (reg != 0 && partial == 0))
3935 if (ACCUMULATE_OUTGOING_ARGS)
3937 /* If this is being stored into a pre-allocated, fixed-size,
3938 stack area, save any previous data at that location. */
3940 #ifdef ARGS_GROW_DOWNWARD
3941 /* stack_slot is negative, but we want to index stack_usage_map
3942 with positive values. */
3943 upper_bound = -argvec[argnum].offset.constant + 1;
3944 lower_bound = upper_bound - argvec[argnum].size.constant;
3945 #else
3946 lower_bound = argvec[argnum].offset.constant;
3947 upper_bound = lower_bound + argvec[argnum].size.constant;
3948 #endif
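/* Illustrative example: when ARGS_GROW_DOWNWARD, a slot at offset -12
   of size 4 gives upper_bound == 13 and lower_bound == 9, so map
   indexes 9..12 cover the slot's four bytes.  */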
3950 for (i = lower_bound; i < upper_bound; i++)
3951 if (stack_usage_map[i]
3952 /* Don't store things in the fixed argument area at this
3953 point; it has already been saved. */
3954 && i > reg_parm_stack_space)
3955 break;
3957 if (i != upper_bound)
3959 /* We need to make a save area. See what mode we can make
3960 it in. */
3961 enum machine_mode save_mode
3962 = mode_for_size (argvec[argnum].size.constant
3963 * BITS_PER_UNIT,
3964 MODE_INT, 1);
3965 rtx stack_area
3966 = gen_rtx_MEM
3967 (save_mode,
3968 memory_address
3969 (save_mode,
3970 plus_constant (argblock,
3971 argvec[argnum].offset.constant)));
3972 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3974 emit_move_insn (argvec[argnum].save_area, stack_area);
3978 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3979 partial, reg, 0, argblock,
3980 GEN_INT (argvec[argnum].offset.constant),
3981 reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
3983 /* Now mark the segment we just used. */
3984 if (ACCUMULATE_OUTGOING_ARGS)
3985 for (i = lower_bound; i < upper_bound; i++)
3986 stack_usage_map[i] = 1;
3988 NO_DEFER_POP;
3992 /* If we pushed args in forward order, perform stack alignment
3993 after pushing the last arg. */
3994 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3995 anti_adjust_stack (GEN_INT (args_size.constant
3996 - original_args_size.constant));
3998 if (PUSH_ARGS_REVERSED)
3999 argnum = nargs - 1;
4000 else
4001 argnum = 0;
4003 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4005 /* Now load any reg parms into their regs. */
4007 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4008 are to be pushed. */
4009 for (count = 0; count < nargs; count++, argnum += inc)
4011 rtx val = argvec[argnum].value;
4012 rtx reg = argvec[argnum].reg;
4013 int partial = argvec[argnum].partial;
4015 /* Handle calls that pass values in multiple non-contiguous
4016 locations. The PA64 has examples of this for library calls. */
4017 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4018 emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
4019 else if (reg != 0 && partial == 0)
4020 emit_move_insn (reg, val);
4022 NO_DEFER_POP;
4025 /* Any regs containing parms remain in use through the call. */
4026 for (count = 0; count < nargs; count++)
4028 rtx reg = argvec[count].reg;
4029 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4030 use_group_regs (&call_fusage, reg);
4031 else if (reg != 0)
4032 use_reg (&call_fusage, reg);
4035 /* Pass the function the address in which to return a structure value. */
4036 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4038 emit_move_insn (struct_value_rtx,
4039 force_reg (Pmode,
4040 force_operand (XEXP (mem_value, 0),
4041 NULL_RTX)));
4042 if (GET_CODE (struct_value_rtx) == REG)
4043 use_reg (&call_fusage, struct_value_rtx);
4046 /* Don't allow popping to be deferred, since then
4047 cse'ing of library calls could delete a call and leave the pop. */
4048 NO_DEFER_POP;
4049 valreg = (mem_value == 0 && outmode != VOIDmode
4050 ? hard_libcall_value (outmode) : NULL_RTX);
4052 /* Stack must be properly aligned now. */
4053 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4054 abort ();
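/* Illustrative example: with a 128-bit PREFERRED_STACK_BOUNDARY the
   mask is 16 - 1, so any stack_pointer_delta that is not a multiple
   of 16 bytes aborts here.  */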
4056 before_call = get_last_insn ();
4058 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4059 will set inhibit_defer_pop to that value. */
4060 /* The return type is needed to decide how many bytes the function pops.
4061 Signedness plays no role in that, so for simplicity, we pretend it's
4062 always signed. We also assume that the list of arguments passed has
4063 no impact, so we pretend it is unknown. */
4065 emit_call_1 (fun,
4066 get_identifier (XSTR (orgfun, 0)),
4067 build_function_type (tfom, NULL_TREE),
4068 original_args_size.constant, args_size.constant,
4069 struct_value_size,
4070 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4071 valreg,
4072 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4074 /* For calls to `setjmp', etc., inform flow.c it should complain
4075 if nonvolatile values are live. For functions that cannot return,
4076 inform flow that control does not fall through. */
4078 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4080 /* The barrier note must be emitted
4081 immediately after the CALL_INSN. Some ports emit more than
4082 just a CALL_INSN above, so we must search for it here. */
4084 rtx last = get_last_insn ();
4085 while (GET_CODE (last) != CALL_INSN)
4087 last = PREV_INSN (last);
4088 /* There was no CALL_INSN? */
4089 if (last == before_call)
4090 abort ();
4093 emit_barrier_after (last);
4096 /* Now restore inhibit_defer_pop to its actual original value. */
4097 OK_DEFER_POP;
4099 /* If call is cse'able, make appropriate pair of reg-notes around it.
4100 Test valreg so we don't crash; may safely ignore `const'
4101 if return type is void. Disable for PARALLEL return values, because
4102 we have no way to move such values into a pseudo register. */
4103 if (flags & ECF_LIBCALL_BLOCK)
4105 rtx insns;
4107 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
4109 insns = get_insns ();
4110 end_sequence ();
4111 emit_insn (insns);
4113 else
4115 rtx note = 0;
4116 rtx temp = gen_reg_rtx (GET_MODE (valreg));
4117 int i;
4119 /* Construct an "equal form" for the value which mentions all the
4120 arguments in order as well as the function name. */
4121 for (i = 0; i < nargs; i++)
4122 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4123 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4125 insns = get_insns ();
4126 end_sequence ();
4128 if (flags & ECF_PURE)
4129 note = gen_rtx_EXPR_LIST (VOIDmode,
4130 gen_rtx_USE (VOIDmode,
4131 gen_rtx_MEM (BLKmode,
4132 gen_rtx_SCRATCH (VOIDmode))),
4133 note);
4135 emit_libcall_block (insns, temp, valreg, note);
4137 valreg = temp;
4140 pop_temp_slots ();
4142 /* Copy the value to the right place. */
4143 if (outmode != VOIDmode && retval)
4145 if (mem_value)
4147 if (value == 0)
4148 value = mem_value;
4149 if (value != mem_value)
4150 emit_move_insn (value, mem_value);
4152 else if (value != 0)
4153 emit_move_insn (value, valreg);
4154 else
4155 value = valreg;
4158 if (ACCUMULATE_OUTGOING_ARGS)
4160 #ifdef REG_PARM_STACK_SPACE
4161 if (save_area)
4163 enum machine_mode save_mode = GET_MODE (save_area);
4164 #ifdef ARGS_GROW_DOWNWARD
4165 rtx stack_area
4166 = gen_rtx_MEM (save_mode,
4167 memory_address (save_mode,
4168 plus_constant (argblock,
4169 - high_to_save)));
4170 #else
4171 rtx stack_area
4172 = gen_rtx_MEM (save_mode,
4173 memory_address (save_mode,
4174 plus_constant (argblock, low_to_save)));
4175 #endif
4177 set_mem_align (stack_area, PARM_BOUNDARY);
4178 if (save_mode != BLKmode)
4179 emit_move_insn (stack_area, save_area);
4180 else
4181 emit_block_move (stack_area, save_area,
4182 GEN_INT (high_to_save - low_to_save + 1),
4183 BLOCK_OP_CALL_PARM);
4185 #endif
4187 /* If we saved any argument areas, restore them. */
4188 for (count = 0; count < nargs; count++)
4189 if (argvec[count].save_area)
4191 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4192 rtx stack_area
4193 = gen_rtx_MEM (save_mode,
4194 memory_address
4195 (save_mode,
4196 plus_constant (argblock,
4197 argvec[count].offset.constant)));
4199 emit_move_insn (stack_area, argvec[count].save_area);
4202 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4203 stack_usage_map = initial_stack_usage_map;
4206 return value;
4210 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4212 for a value of mode OUTMODE,
4213 with NARGS different arguments, passed as alternating rtx values
4214 and machine_modes to convert them to.
4215 The rtx values should have been passed through protect_from_queue already.
4217 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4218 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4219 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4220 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4221 REG_LIBCALL/REG_RETVAL notes with an extra (use (mem (scratch))) note,
4222 or other LCT_ value for other types of library calls. */
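/* A minimal illustrative use (the libfunc name "__helper" and the
   operand rtxes ADDR and SIZE are hypothetical):

       emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__helper"),
                          LCT_NORMAL, VOIDmode, 2,
                          addr, Pmode, size, TYPE_MODE (sizetype));

   Each argument rtx is followed by the mode it is already in; no
   conversion or promotion is done here, so callers must widen values
   beforehand.  */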
4224 void
4225 emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
4226 enum machine_mode outmode, int nargs, ...))
4228 VA_OPEN (p, nargs);
4229 VA_FIXEDARG (p, rtx, orgfun);
4230 VA_FIXEDARG (p, int, fn_type);
4231 VA_FIXEDARG (p, enum machine_mode, outmode);
4232 VA_FIXEDARG (p, int, nargs);
4234 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4236 VA_CLOSE (p);
4239 /* Like emit_library_call except that an extra argument, VALUE,
4240 comes second and says where to store the result.
4241 (If VALUE is zero, this function chooses a convenient way
4242 to return the value.)
4244 This function returns an rtx for where the value is to be found.
4245 If VALUE is nonzero, VALUE is returned. */
4247 rtx
4248 emit_library_call_value VPARAMS((rtx orgfun, rtx value,
4249 enum libcall_type fn_type,
4250 enum machine_mode outmode, int nargs, ...))
4252 rtx result;
4254 VA_OPEN (p, nargs);
4255 VA_FIXEDARG (p, rtx, orgfun);
4256 VA_FIXEDARG (p, rtx, value);
4257 VA_FIXEDARG (p, int, fn_type);
4258 VA_FIXEDARG (p, enum machine_mode, outmode);
4259 VA_FIXEDARG (p, int, nargs);
4261 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4262 nargs, p);
4264 VA_CLOSE (p);
4266 return result;
4269 /* Store a single argument for a function call
4270 into the register or memory area where it must be passed.
4271 *ARG describes the argument value and where to pass it.
4273 ARGBLOCK is the address of the stack-block for all the arguments,
4274 or 0 on a machine where arguments are pushed individually.
4276 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4277 so must be careful about how the stack is used.
4279 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4280 argument area. When ACCUMULATE_OUTGOING_ARGS is set, this indicates
4281 that we need not worry about saving and restoring the stack.
4283 FNDECL is the declaration of the function we are calling.
4285 Return nonzero if this arg should cause sibcall failure,
4286 zero otherwise. */
4288 static int
4289 store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
4290 struct arg_data *arg;
4291 rtx argblock;
4292 int flags;
4293 int variable_size ATTRIBUTE_UNUSED;
4294 int reg_parm_stack_space;
4296 tree pval = arg->tree_value;
4297 rtx reg = 0;
4298 int partial = 0;
4299 int used = 0;
4300 int i, lower_bound = 0, upper_bound = 0;
4301 int sibcall_failure = 0;
4303 if (TREE_CODE (pval) == ERROR_MARK)
4304 return 1;
4306 /* Push a new temporary level for any temporaries we make for
4307 this argument. */
4308 push_temp_slots ();
4310 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4312 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4313 save any previous data at that location. */
4314 if (argblock && ! variable_size && arg->stack)
4316 #ifdef ARGS_GROW_DOWNWARD
4317 /* stack_slot is negative, but we want to index stack_usage_map
4318 with positive values. */
4319 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4320 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4321 else
4322 upper_bound = 0;
4324 lower_bound = upper_bound - arg->size.constant;
4325 #else
4326 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4327 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4328 else
4329 lower_bound = 0;
4331 upper_bound = lower_bound + arg->size.constant;
4332 #endif
4334 for (i = lower_bound; i < upper_bound; i++)
4335 if (stack_usage_map[i]
4336 /* Don't store things in the fixed argument area at this point;
4337 it has already been saved. */
4338 && i > reg_parm_stack_space)
4339 break;
4341 if (i != upper_bound)
4343 /* We need to make a save area. See what mode we can make it in. */
4344 enum machine_mode save_mode
4345 = mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4346 rtx stack_area
4347 = gen_rtx_MEM (save_mode,
4348 memory_address (save_mode,
4349 XEXP (arg->stack_slot, 0)));
4351 if (save_mode == BLKmode)
4353 tree ot = TREE_TYPE (arg->tree_value);
4354 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4355 | TYPE_QUAL_CONST));
4357 arg->save_area = assign_temp (nt, 0, 1, 1);
4358 preserve_temp_slots (arg->save_area);
4359 emit_block_move (validize_mem (arg->save_area), stack_area,
4360 expr_size (arg->tree_value),
4361 BLOCK_OP_CALL_PARM);
4363 else
4365 arg->save_area = gen_reg_rtx (save_mode);
4366 emit_move_insn (arg->save_area, stack_area);
4370 /* Now that we have saved any slots that will be overwritten by this
4371 store, mark all slots this store will use. We must do this before
4372 we actually expand the argument since the expansion itself may
4373 trigger library calls which might need to use the same stack slot. */
4374 if (argblock && ! variable_size && arg->stack)
4375 for (i = lower_bound; i < upper_bound; i++)
4376 stack_usage_map[i] = 1;
4379 /* If this isn't going to be placed on both the stack and in registers,
4380 set up the register and number of words. */
4381 if (! arg->pass_on_stack)
4383 if (flags & ECF_SIBCALL)
4384 reg = arg->tail_call_reg;
4385 else
4386 reg = arg->reg;
4387 partial = arg->partial;
4390 if (reg != 0 && partial == 0)
4391 /* Being passed entirely in a register. We shouldn't be called in
4392 this case. */
4393 abort ();
4395 /* If this arg needs special alignment, don't load the registers
4396 here. */
4397 if (arg->n_aligned_regs != 0)
4398 reg = 0;
4400 /* If this is being passed partially in a register, we can't evaluate
4401 it directly into its stack slot. Otherwise, we can. */
4402 if (arg->value == 0)
4404 /* stack_arg_under_construction is nonzero if a function argument is
4405 being evaluated directly into the outgoing argument list and
4406 expand_call must take special action to preserve the argument list
4407 if it is called recursively.
4409 For scalar function arguments stack_usage_map is sufficient to
4410 determine which stack slots must be saved and restored. Scalar
4411 arguments in general have pass_on_stack == 0.
4413 If this argument is initialized by a function which takes the
4414 address of the argument (a C++ constructor or a C function
4415 returning a BLKmode structure), then stack_usage_map is
4416 insufficient and expand_call must push the stack around the
4417 function call. Such arguments have pass_on_stack == 1.
4419 Note that it is always safe to set stack_arg_under_construction,
4420 but this generates suboptimal code if set when not needed. */
4422 if (arg->pass_on_stack)
4423 stack_arg_under_construction++;
4425 arg->value = expand_expr (pval,
4426 (partial
4427 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4428 ? NULL_RTX : arg->stack,
4429 VOIDmode, 0);
4431 /* If we are promoting the object (or if for any other reason the mode
4432 doesn't agree), convert the value to the expected mode. */
4434 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4435 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4436 arg->value, arg->unsignedp);
4438 if (arg->pass_on_stack)
4439 stack_arg_under_construction--;
4442 /* Don't allow anything left on stack from computation
4443 of argument to alloca. */
4444 if (flags & ECF_MAY_BE_ALLOCA)
4445 do_pending_stack_adjust ();
4447 if (arg->value == arg->stack)
4448 /* If the value is already in the stack slot, we are done. */
4450 else if (arg->mode != BLKmode)
4452 int size;
4454 /* Argument is a scalar, not entirely passed in registers.
4455 (If part is passed in registers, arg->partial says how much
4456 and emit_push_insn will take care of putting it there.)
4458 Push it, and if its size is less than the
4459 amount of space allocated to it,
4460 also bump stack pointer by the additional space.
4461 Note that in C the default argument promotions
4462 will prevent such mismatches. */
4464 size = GET_MODE_SIZE (arg->mode);
4465 /* Compute how much space the push instruction will push.
4466 On many machines, pushing a byte will advance the stack
4467 pointer by a halfword. */
4468 #ifdef PUSH_ROUNDING
4469 size = PUSH_ROUNDING (size);
4470 #endif
4471 used = size;
4473 /* Compute how much space the argument should get:
4474 round up to a multiple of the alignment for arguments. */
4475 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4476 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4477 / (PARM_BOUNDARY / BITS_PER_UNIT))
4478 * (PARM_BOUNDARY / BITS_PER_UNIT));
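/* Illustrative example: with a 32-bit PARM_BOUNDARY, pushing a 1-byte
   argument gives used == 4; the 3 bytes of padding are passed to
   emit_push_insn below as USED - SIZE.  */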
4480 /* This isn't already where we want it on the stack, so put it there.
4481 This can either be done with push or copy insns. */
4482 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4483 PARM_BOUNDARY, partial, reg, used - size, argblock,
4484 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4485 ARGS_SIZE_RTX (arg->alignment_pad));
4487 /* Unless this is a partially-in-register argument, the argument is now
4488 in the stack. */
4489 if (partial == 0)
4490 arg->value = arg->stack;
4492 else
4494 /* BLKmode, at least partly to be pushed. */
4496 unsigned int parm_align;
4497 int excess;
4498 rtx size_rtx;
4500 /* Pushing a nonscalar.
4501 If part is passed in registers, PARTIAL says how much
4502 and emit_push_insn will take care of putting it there. */
4504 /* Round its size up to a multiple
4505 of the allocation unit for arguments. */
4507 if (arg->size.var != 0)
4509 excess = 0;
4510 size_rtx = ARGS_SIZE_RTX (arg->size);
4512 else
4514 /* PUSH_ROUNDING has no effect on us, because
4515 emit_push_insn for BLKmode is careful to avoid it. */
4516 excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4517 + partial * UNITS_PER_WORD);
4518 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4519 NULL_RTX, TYPE_MODE (sizetype), 0);
4522 /* Some types will require stricter alignment, which will be
4523 provided for elsewhere in argument layout. */
4524 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4526 /* When an argument is padded down, the block is aligned to
4527 PARM_BOUNDARY, but the actual argument isn't. */
4528 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4530 if (arg->size.var)
4531 parm_align = BITS_PER_UNIT;
4532 else if (excess)
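/* (EXCESS & -EXCESS) isolates the lowest set bit of EXCESS, i.e. the
   largest power of two dividing it; e.g. excess == 6 yields 2, for an
   excess_align of 16 bits.  */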
4534 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4535 parm_align = MIN (parm_align, excess_align);
4539 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4541 /* emit_push_insn might not work properly if arg->value and
4542 argblock + arg->offset areas overlap. */
4543 rtx x = arg->value;
4544 int i = 0;
4546 if (XEXP (x, 0) == current_function_internal_arg_pointer
4547 || (GET_CODE (XEXP (x, 0)) == PLUS
4548 && XEXP (XEXP (x, 0), 0) ==
4549 current_function_internal_arg_pointer
4550 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4552 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4553 i = INTVAL (XEXP (XEXP (x, 0), 1));
4555 /* expand_call should ensure this.  */
4556 if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
4557 abort ();
4559 if (arg->offset.constant > i)
4561 if (arg->offset.constant < i + INTVAL (size_rtx))
4562 sibcall_failure = 1;
4564 else if (arg->offset.constant < i)
4566 if (i < arg->offset.constant + INTVAL (size_rtx))
4567 sibcall_failure = 1;
4572 /* Special handling is required if part of the parameter lies in the
4573 register parameter area. The argument may be copied into the stack
4574 slot using memcpy(), but the original contents of the register
4575 parameter area will be restored after the memcpy() call.
4577 To ensure that the part that lies in the register parameter area
4578 is copied correctly, we emit a separate push for that part. This
4579 push should be small enough to avoid a call to memcpy(). */
4580 #ifndef STACK_PARMS_IN_REG_PARM_AREA
4581 if (arg->reg && arg->pass_on_stack)
4582 #else
4583 if (1)
4584 #endif
4586 if (arg->offset.constant < reg_parm_stack_space && arg->offset.var)
4587 error ("variable offset is passed partially in stack and in reg");
4588 else if (arg->offset.constant < reg_parm_stack_space && arg->size.var)
4589 error ("variable size is passed partially in stack and in reg");
4590 else if (arg->offset.constant < reg_parm_stack_space
4591 && ((arg->offset.constant + arg->size.constant)
4592 > reg_parm_stack_space))
4594 rtx size_rtx1 = GEN_INT (reg_parm_stack_space - arg->offset.constant);
4595 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx1,
4596 parm_align, partial, reg, excess, argblock,
4597 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4598 ARGS_SIZE_RTX (arg->alignment_pad));
4603 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4604 parm_align, partial, reg, excess, argblock,
4605 ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4606 ARGS_SIZE_RTX (arg->alignment_pad));
4608 /* Unless this is a partially-in-register argument, the argument is now
4609 in the stack.
4611 ??? Unlike the case above, in which we want the actual
4612 address of the data, so that we can load it directly into a
4613 register, here we want the address of the stack slot, so that
4614 it's properly aligned for word-by-word copying or something
4615 like that. It's not clear that this is always correct. */
4616 if (partial == 0)
4617 arg->value = arg->stack_slot;
4620 /* Once we have pushed something, pops can't safely
4621 be deferred during the rest of the arguments. */
4622 NO_DEFER_POP;
4624 /* ANSI doesn't require a sequence point here,
4625 but PCC has one, so this will avoid some problems. */
4626 emit_queue ();
4628 /* Free any temporary slots made in processing this argument. Show
4629 that we might have taken the address of something and pushed that
4630 as an operand. */
4631 preserve_temp_slots (NULL_RTX);
4632 free_temp_slots ();
4633 pop_temp_slots ();
4635 return sibcall_failure;