gcc/calls.c
1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "optabs.h"
31 #include "libfuncs.h"
32 #include "function.h"
33 #include "regs.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "tm_p.h"
37 #include "timevar.h"
38 #include "sbitmap.h"
39 #include "langhooks.h"
40 #include "target.h"
41 #include "cgraph.h"
42 #include "except.h"
44 #ifndef STACK_POINTER_OFFSET
45 #define STACK_POINTER_OFFSET 0
46 #endif
48 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
49 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
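/* Illustrative arithmetic, not part of the original source: on a target
   where PREFERRED_STACK_BOUNDARY is 128 bits and BITS_PER_UNIT is 8,
   STACK_BYTES evaluates to 128 / 8 == 16, i.e. a 16-byte stack boundary.  */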
51 /* Data structure and subroutines used within expand_call. */
53 struct arg_data
55 /* Tree node for this argument. */
56 tree tree_value;
57 /* Mode for value; TYPE_MODE unless promoted. */
58 enum machine_mode mode;
59 /* Current RTL value for argument, or 0 if it isn't precomputed. */
60 rtx value;
 61   /* Initially-computed RTL value for argument; only for const functions.  */
62 rtx initial_value;
 63   /* Register to pass this argument in, 0 if passed on stack, or a
64 PARALLEL if the arg is to be copied into multiple non-contiguous
65 registers. */
66 rtx reg;
67 /* Register to pass this argument in when generating tail call sequence.
68 This is not the same register as for normal calls on machines with
69 register windows. */
70 rtx tail_call_reg;
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
73 int unsignedp;
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
76 int partial;
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
81 int pass_on_stack;
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
86 rtx stack;
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
90 rtx stack_slot;
91 /* Place that this stack area has been saved, if needed. */
92 rtx save_area;
93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
97 rtx *aligned_regs;
98 int n_aligned_regs;
 101 /* A vector of one char per byte of stack space.  A byte is nonzero if
102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
 110 /* A bitmap of virtual-incoming stack space.  A bit is set if the corresponding
 111    stack location has already had its tail call argument stored into it.
 112    This bitmap is used to prevent sibling call optimization if the function
 113    tries to use its parent's incoming argument slots when they have already
 114    been overwritten with tail call arguments.  */
115 static sbitmap stored_args_map;
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 int stack_arg_under_construction;
124 static int calls_function (tree, int);
125 static int calls_function_1 (tree, int);
127 static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
128 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
129 CUMULATIVE_ARGS *);
130 static void precompute_register_parameters (int, struct arg_data *, int *);
131 static int store_one_arg (struct arg_data *, rtx, int, int, int);
132 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
133 static int finalize_must_preallocate (int, int, struct arg_data *,
134 struct args_size *);
135 static void precompute_arguments (int, int, struct arg_data *);
136 static int compute_argument_block_size (int, struct args_size *, int);
137 static void initialize_argument_information (int, struct arg_data *,
138 struct args_size *, int, tree,
139 tree, CUMULATIVE_ARGS *, int,
140 rtx *, int *, int *, int *);
141 static void compute_argument_addresses (struct arg_data *, rtx, int);
142 static rtx rtx_for_function_call (tree, tree);
143 static void load_register_parameters (struct arg_data *, int, rtx *, int,
144 int, int *);
145 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
146 enum machine_mode, int, va_list);
147 static int special_function_p (tree, int);
148 static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
149 static int check_sibcall_argument_overlap_1 (rtx);
150 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
152 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
153 int);
154 static tree fix_unsafe_tree (tree);
156 #ifdef REG_PARM_STACK_SPACE
157 static rtx save_fixed_argument_area (int, rtx, int *, int *);
158 static void restore_fixed_argument_area (rtx, rtx, int, int);
159 #endif
161 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
162 `alloca'.
164 If WHICH is 0, return 1 if EXP contains a call to any function.
 165    Actually, we only need to return 1 if evaluating EXP would require pushing
166 arguments on the stack, but that is too difficult to compute, so we just
167 assume any function call might require the stack. */
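/* A minimal usage sketch, not part of the original source; the real call
   sites are in precompute_arguments and expand_call below, and the names
   precompute_this_arg and must_preallocate are hypothetical.  Assumes ARGS
   and I are in scope as in those callers.  */
#if 0
  /* WHICH == 1: might evaluating this argument call alloca?  */
  if (calls_function (args[i].tree_value, 1))
    precompute_this_arg = 1;

  /* WHICH == 0: might it call any function at all, and hence possibly
     push arguments on the stack?  */
  if (calls_function (args[i].tree_value, 0))
    must_preallocate = 1;
#endif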
169 static tree calls_function_save_exprs;
171 static int
172 calls_function (tree exp, int which)
174 int val;
176 calls_function_save_exprs = 0;
177 val = calls_function_1 (exp, which);
178 calls_function_save_exprs = 0;
179 return val;
 182 /* Recursive function to do the work of the above function.  */
184 static int
185 calls_function_1 (tree exp, int which)
187 int i;
188 enum tree_code code = TREE_CODE (exp);
189 int class = TREE_CODE_CLASS (code);
190 int length = first_rtl_op (code);
192 /* If this code is language-specific, we don't know what it will do. */
193 if ((int) code >= NUM_TREE_CODES)
194 return 1;
196 switch (code)
198 case CALL_EXPR:
199 if (which == 0)
200 return 1;
201 else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
202 == FUNCTION_TYPE)
203 && (TYPE_RETURNS_STACK_DEPRESSED
204 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
205 return 1;
206 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
207 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
208 == FUNCTION_DECL)
 209 	       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
 210 				       0)
 211 		   & ECF_MAY_BE_ALLOCA))
212 return 1;
214 break;
216 case CONSTRUCTOR:
218 tree tem;
220 for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
221 if (calls_function_1 (TREE_VALUE (tem), which))
222 return 1;
225 return 0;
227 case SAVE_EXPR:
228 if (SAVE_EXPR_RTL (exp) != 0)
229 return 0;
230 if (value_member (exp, calls_function_save_exprs))
231 return 0;
232 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
233 calls_function_save_exprs);
234 return (TREE_OPERAND (exp, 0) != 0
235 && calls_function_1 (TREE_OPERAND (exp, 0), which));
237 case BLOCK:
239 tree local;
240 tree subblock;
242 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
243 if (DECL_INITIAL (local) != 0
244 && calls_function_1 (DECL_INITIAL (local), which))
245 return 1;
247 for (subblock = BLOCK_SUBBLOCKS (exp);
248 subblock;
249 subblock = TREE_CHAIN (subblock))
250 if (calls_function_1 (subblock, which))
251 return 1;
253 return 0;
255 case TREE_LIST:
256 for (; exp != 0; exp = TREE_CHAIN (exp))
257 if (calls_function_1 (TREE_VALUE (exp), which))
258 return 1;
259 return 0;
261 default:
262 break;
265 /* Only expressions, references, and blocks can contain calls. */
266 if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
267 return 0;
269 for (i = 0; i < length; i++)
270 if (TREE_OPERAND (exp, i) != 0
271 && calls_function_1 (TREE_OPERAND (exp, i), which))
272 return 1;
274 return 0;
277 /* Force FUNEXP into a form suitable for the address of a CALL,
278 and return that as an rtx. Also load the static chain register
279 if FNDECL is a nested function.
281 CALL_FUSAGE points to a variable holding the prospective
282 CALL_INSN_FUNCTION_USAGE information. */
285 prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
286 int reg_parm_seen, int sibcallp)
288 rtx static_chain_value = 0;
290 funexp = protect_from_queue (funexp, 0);
292 if (fndecl != 0)
293 /* Get possible static chain value for nested function in C. */
294 static_chain_value = lookup_static_chain (fndecl);
296 /* Make a valid memory address and copy constants thru pseudo-regs,
297 but not for a constant address if -fno-function-cse. */
298 if (GET_CODE (funexp) != SYMBOL_REF)
299 /* If we are using registers for parameters, force the
300 function address into a register now. */
301 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
302 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
303 : memory_address (FUNCTION_MODE, funexp));
304 else if (! sibcallp)
306 #ifndef NO_FUNCTION_CSE
307 if (optimize && ! flag_no_function_cse)
308 #ifdef NO_RECURSIVE_FUNCTION_CSE
309 if (fndecl != current_function_decl)
310 #endif
311 funexp = force_reg (Pmode, funexp);
312 #endif
315 if (static_chain_value != 0)
317 emit_move_insn (static_chain_rtx, static_chain_value);
319 if (GET_CODE (static_chain_rtx) == REG)
320 use_reg (call_fusage, static_chain_rtx);
323 return funexp;
326 /* Generate instructions to call function FUNEXP,
327 and optionally pop the results.
328 The CALL_INSN is the first insn generated.
330 FNDECL is the declaration node of the function. This is given to the
331 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
333 FUNTYPE is the data type of the function. This is given to the macro
334 RETURN_POPS_ARGS to determine whether this function pops its own args.
335 We used to allow an identifier for library functions, but that doesn't
336 work when the return type is an aggregate type and the calling convention
337 says that the pointer to this aggregate is to be popped by the callee.
339 STACK_SIZE is the number of bytes of arguments on the stack,
340 ROUNDED_STACK_SIZE is that number rounded up to
341 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
342 both to put into the call insn and to generate explicit popping
343 code if necessary.
345 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
346 It is zero if this call doesn't want a structure value.
348 NEXT_ARG_REG is the rtx that results from executing
349 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
350 just after all the args have had their registers assigned.
351 This could be whatever you like, but normally it is the first
352 arg-register beyond those used for args in this call,
353 or 0 if all the arg-registers are used in this call.
354 It is passed on to `gen_call' so you can put this info in the call insn.
356 VALREG is a hard register in which a value is returned,
357 or 0 if the call does not return a value.
359 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
360 the args to this call were processed.
361 We restore `inhibit_defer_pop' to that value.
363 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
364 denote registers used by the called function. */
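/* For orientation (an illustrative sketch, not part of the original source),
   the insn emitted below looks roughly like

       (call_insn (set (reg <VALREG>)
                       (call (mem (symbol_ref "fn"))
                             (const_int <ROUNDED_STACK_SIZE>))))

   when a value is returned, or just the (call ...) body otherwise, with
   CALL_INSN_FUNCTION_USAGE carrying the USE/CLOBBER chain built from
   CALL_FUSAGE.  The exact pattern is supplied by the target's call,
   call_value, *_pop and sibcall expanders.  */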
366 static void
367 emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
368 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
369 HOST_WIDE_INT rounded_stack_size,
370 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
371 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
372 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
373 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
375 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
376 rtx call_insn;
377 int already_popped = 0;
378 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
379 #if defined (HAVE_call) && defined (HAVE_call_value)
380 rtx struct_value_size_rtx;
381 struct_value_size_rtx = GEN_INT (struct_value_size);
382 #endif
384 #ifdef CALL_POPS_ARGS
385 n_popped += CALL_POPS_ARGS (* args_so_far);
386 #endif
388 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
389 and we don't want to load it into a register as an optimization,
390 because prepare_call_address already did it if it should be done. */
391 if (GET_CODE (funexp) != SYMBOL_REF)
392 funexp = memory_address (FUNCTION_MODE, funexp);
394 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
395 if ((ecf_flags & ECF_SIBCALL)
396 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
397 && (n_popped > 0 || stack_size == 0))
399 rtx n_pop = GEN_INT (n_popped);
400 rtx pat;
402 /* If this subroutine pops its own args, record that in the call insn
403 if possible, for the sake of frame pointer elimination. */
405 if (valreg)
406 pat = GEN_SIBCALL_VALUE_POP (valreg,
407 gen_rtx_MEM (FUNCTION_MODE, funexp),
408 rounded_stack_size_rtx, next_arg_reg,
409 n_pop);
410 else
411 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
412 rounded_stack_size_rtx, next_arg_reg, n_pop);
414 emit_call_insn (pat);
415 already_popped = 1;
417 else
418 #endif
420 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
421 /* If the target has "call" or "call_value" insns, then prefer them
422 if no arguments are actually popped. If the target does not have
423 "call" or "call_value" insns, then we must use the popping versions
424 even if the call has no arguments to pop. */
425 #if defined (HAVE_call) && defined (HAVE_call_value)
426 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
427 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
428 #else
429 if (HAVE_call_pop && HAVE_call_value_pop)
430 #endif
432 rtx n_pop = GEN_INT (n_popped);
433 rtx pat;
435 /* If this subroutine pops its own args, record that in the call insn
436 if possible, for the sake of frame pointer elimination. */
438 if (valreg)
439 pat = GEN_CALL_VALUE_POP (valreg,
440 gen_rtx_MEM (FUNCTION_MODE, funexp),
441 rounded_stack_size_rtx, next_arg_reg, n_pop);
442 else
443 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
444 rounded_stack_size_rtx, next_arg_reg, n_pop);
446 emit_call_insn (pat);
447 already_popped = 1;
449 else
450 #endif
452 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
453 if ((ecf_flags & ECF_SIBCALL)
454 && HAVE_sibcall && HAVE_sibcall_value)
456 if (valreg)
457 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
458 gen_rtx_MEM (FUNCTION_MODE, funexp),
459 rounded_stack_size_rtx,
460 next_arg_reg, NULL_RTX));
461 else
462 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
463 rounded_stack_size_rtx, next_arg_reg,
464 struct_value_size_rtx));
466 else
467 #endif
469 #if defined (HAVE_call) && defined (HAVE_call_value)
470 if (HAVE_call && HAVE_call_value)
472 if (valreg)
473 emit_call_insn (GEN_CALL_VALUE (valreg,
474 gen_rtx_MEM (FUNCTION_MODE, funexp),
475 rounded_stack_size_rtx, next_arg_reg,
476 NULL_RTX));
477 else
478 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
479 rounded_stack_size_rtx, next_arg_reg,
480 struct_value_size_rtx));
482 else
483 #endif
484 abort ();
486 /* Find the call we just emitted. */
487 call_insn = last_call_insn ();
489 /* Mark memory as used for "pure" function call. */
490 if (ecf_flags & ECF_PURE)
491 call_fusage
492 = gen_rtx_EXPR_LIST
493 (VOIDmode,
494 gen_rtx_USE (VOIDmode,
495 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
496 call_fusage);
498 /* Put the register usage information there. */
499 add_function_usage_to (call_insn, call_fusage);
501 /* If this is a const call, then set the insn's unchanging bit. */
502 if (ecf_flags & (ECF_CONST | ECF_PURE))
503 CONST_OR_PURE_CALL_P (call_insn) = 1;
505 /* If this call can't throw, attach a REG_EH_REGION reg note to that
506 effect. */
507 if (ecf_flags & ECF_NOTHROW)
508 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
509 REG_NOTES (call_insn));
510 else
511 note_eh_region_may_contain_throw ();
513 if (ecf_flags & ECF_NORETURN)
514 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
515 REG_NOTES (call_insn));
516 if (ecf_flags & ECF_ALWAYS_RETURN)
517 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
518 REG_NOTES (call_insn));
520 if (ecf_flags & ECF_RETURNS_TWICE)
522 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
523 REG_NOTES (call_insn));
524 current_function_calls_setjmp = 1;
527 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
529 /* Restore this now, so that we do defer pops for this call's args
530 if the context of the call as a whole permits. */
531 inhibit_defer_pop = old_inhibit_defer_pop;
533 /* Don't bother cleaning up after a noreturn function. */
534 if (ecf_flags & (ECF_NORETURN | ECF_LONGJMP))
535 return;
537 if (n_popped > 0)
539 if (!already_popped)
540 CALL_INSN_FUNCTION_USAGE (call_insn)
541 = gen_rtx_EXPR_LIST (VOIDmode,
542 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
543 CALL_INSN_FUNCTION_USAGE (call_insn));
544 rounded_stack_size -= n_popped;
545 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
546 stack_pointer_delta -= n_popped;
549 if (!ACCUMULATE_OUTGOING_ARGS)
551 /* If returning from the subroutine does not automatically pop the args,
552 we need an instruction to pop them sooner or later.
553 Perhaps do it now; perhaps just record how much space to pop later.
555 If returning from the subroutine does pop the args, indicate that the
556 stack pointer will be changed. */
558 if (rounded_stack_size != 0)
560 if (ecf_flags & ECF_SP_DEPRESSED)
561 /* Just pretend we did the pop. */
562 stack_pointer_delta -= rounded_stack_size;
563 else if (flag_defer_pop && inhibit_defer_pop == 0
564 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
565 pending_stack_adjust += rounded_stack_size;
566 else
567 adjust_stack (rounded_stack_size_rtx);
570 /* When we accumulate outgoing args, we must avoid any stack manipulations.
571 Restore the stack pointer to its original value now. Usually
572 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
573 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
574 popping variants of functions exist as well.
 576      ??? We may optimize similarly to defer_pop above, but it is
577 probably not worthwhile.
579 ??? It will be worthwhile to enable combine_stack_adjustments even for
580 such machines. */
581 else if (n_popped)
582 anti_adjust_stack (GEN_INT (n_popped));
 585 /* Determine if the function identified by FNDECL is one with
586 special properties we wish to know about.
588 For example, if the function might return more than one time (setjmp), then
589 set RETURNS_TWICE to a nonzero value.
 591    Similarly set LONGJMP if the function is in the longjmp family.
593 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
594 space from the stack such as alloca. */
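/* A minimal usage sketch, not part of the original source: callers pass
   their current ECF_* flag word through and test the returned bits.
   Assumes FNDECL and FLAGS are in scope as in expand_call.  */
#if 0
  flags = flags_from_decl_or_type (fndecl);
  flags = special_function_p (fndecl, flags);

  if (flags & ECF_RETURNS_TWICE)	/* setjmp, vfork, ...  */
    current_function_calls_setjmp = 1;
  if (flags & ECF_MAY_BE_ALLOCA)	/* alloca-like allocation  */
    current_function_calls_alloca = 1;
#endif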
596 static int
597 special_function_p (tree fndecl, int flags)
599 if (! (flags & ECF_MALLOC)
600 && fndecl && DECL_NAME (fndecl)
601 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
602 /* Exclude functions not at the file scope, or not `extern',
603 since they are not the magic functions we would otherwise
604 think they are.
605 FIXME: this should be handled with attributes, not with this
606 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
607 because you can declare fork() inside a function if you
608 wish. */
609 && (DECL_CONTEXT (fndecl) == NULL_TREE
610 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
611 && TREE_PUBLIC (fndecl))
613 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
614 const char *tname = name;
616 /* We assume that alloca will always be called by name. It
617 makes no sense to pass it as a pointer-to-function to
618 anything that does not understand its behavior. */
619 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
620 && name[0] == 'a'
621 && ! strcmp (name, "alloca"))
622 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
623 && name[0] == '_'
624 && ! strcmp (name, "__builtin_alloca"))))
625 flags |= ECF_MAY_BE_ALLOCA;
627 /* Disregard prefix _, __ or __x. */
628 if (name[0] == '_')
630 if (name[1] == '_' && name[2] == 'x')
631 tname += 3;
632 else if (name[1] == '_')
633 tname += 2;
634 else
635 tname += 1;
638 if (tname[0] == 's')
640 if ((tname[1] == 'e'
641 && (! strcmp (tname, "setjmp")
642 || ! strcmp (tname, "setjmp_syscall")))
643 || (tname[1] == 'i'
644 && ! strcmp (tname, "sigsetjmp"))
645 || (tname[1] == 'a'
646 && ! strcmp (tname, "savectx")))
647 flags |= ECF_RETURNS_TWICE;
649 if (tname[1] == 'i'
650 && ! strcmp (tname, "siglongjmp"))
651 flags |= ECF_LONGJMP;
653 else if ((tname[0] == 'q' && tname[1] == 's'
654 && ! strcmp (tname, "qsetjmp"))
655 || (tname[0] == 'v' && tname[1] == 'f'
656 && ! strcmp (tname, "vfork")))
657 flags |= ECF_RETURNS_TWICE;
659 else if (tname[0] == 'l' && tname[1] == 'o'
660 && ! strcmp (tname, "longjmp"))
661 flags |= ECF_LONGJMP;
663 else if ((tname[0] == 'f' && tname[1] == 'o'
664 && ! strcmp (tname, "fork"))
 665 	   /* Linux specific: __clone.  Check NAME to insist on the
666 leading underscores, to avoid polluting the ISO / POSIX
667 namespace. */
668 || (name[0] == '_' && name[1] == '_'
669 && ! strcmp (tname, "clone"))
670 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
671 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
672 && (tname[5] == '\0'
673 || ((tname[5] == 'p' || tname[5] == 'e')
674 && tname[6] == '\0'))))
675 flags |= ECF_FORK_OR_EXEC;
677 return flags;
 680 /* Return nonzero when FNDECL represents a call to setjmp or a related function that may return more than once.  */
683 setjmp_call_p (tree fndecl)
685 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
 688 /* Return true when EXP contains an alloca call.  */
689 bool
690 alloca_call_p (tree exp)
692 if (TREE_CODE (exp) == CALL_EXPR
693 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
694 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
695 == FUNCTION_DECL)
696 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
697 0) & ECF_MAY_BE_ALLOCA))
698 return true;
699 return false;
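/* Illustrative only, not part of the original source: the shape this matches
   is a direct call.  For the C statement "p = alloca (n);" the EXP argument
   is roughly

       CALL_EXPR
         operand 0:  ADDR_EXPR of FUNCTION_DECL "alloca"
         operand 1:  TREE_LIST of actual arguments

   Calls made through a function pointer are deliberately not recognized;
   see the "called by name" comment in special_function_p above.  */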
702 /* Detect flags (function attributes) from the function decl or type node. */
705 flags_from_decl_or_type (tree exp)
707 int flags = 0;
708 tree type = exp;
710 if (DECL_P (exp))
712 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
713 type = TREE_TYPE (exp);
715 if (i)
717 if (i->pure_function)
718 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
719 if (i->const_function)
720 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
723 /* The function exp may have the `malloc' attribute. */
724 if (DECL_IS_MALLOC (exp))
725 flags |= ECF_MALLOC;
727 /* The function exp may have the `pure' attribute. */
728 if (DECL_IS_PURE (exp))
729 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
731 if (TREE_NOTHROW (exp))
732 flags |= ECF_NOTHROW;
734 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
735 flags |= ECF_LIBCALL_BLOCK;
738 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
739 flags |= ECF_CONST;
741 if (TREE_THIS_VOLATILE (exp))
742 flags |= ECF_NORETURN;
744 /* Mark if the function returns with the stack pointer depressed. We
745 cannot consider it pure or constant in that case. */
746 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
748 flags |= ECF_SP_DEPRESSED;
749 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
752 return flags;
755 /* Precompute all register parameters as described by ARGS, storing values
756 into fields within the ARGS array.
758 NUM_ACTUALS indicates the total number elements in the ARGS array.
760 Set REG_PARM_SEEN if we encounter a register parameter. */
762 static void
763 precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
765 int i;
767 *reg_parm_seen = 0;
769 for (i = 0; i < num_actuals; i++)
770 if (args[i].reg != 0 && ! args[i].pass_on_stack)
772 *reg_parm_seen = 1;
774 if (args[i].value == 0)
776 push_temp_slots ();
777 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
778 VOIDmode, 0);
779 preserve_temp_slots (args[i].value);
780 pop_temp_slots ();
782 /* ANSI doesn't require a sequence point here,
783 but PCC has one, so this will avoid some problems. */
784 emit_queue ();
787 /* If the value is a non-legitimate constant, force it into a
788 pseudo now. TLS symbols sometimes need a call to resolve. */
789 if (CONSTANT_P (args[i].value)
790 && !LEGITIMATE_CONSTANT_P (args[i].value))
791 args[i].value = force_reg (args[i].mode, args[i].value);
793 /* If we are to promote the function arg to a wider mode,
794 do it now. */
796 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
797 args[i].value
798 = convert_modes (args[i].mode,
799 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
800 args[i].value, args[i].unsignedp);
802 /* If the value is expensive, and we are inside an appropriately
803 short loop, put the value into a pseudo and then put the pseudo
804 into the hard reg.
806 For small register classes, also do this if this call uses
807 register parameters. This is to avoid reload conflicts while
808 loading the parameters registers. */
810 if ((! (GET_CODE (args[i].value) == REG
811 || (GET_CODE (args[i].value) == SUBREG
812 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
813 && args[i].mode != BLKmode
814 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
815 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
816 || preserve_subexpressions_p ()))
817 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
821 #ifdef REG_PARM_STACK_SPACE
823 /* The argument list is the property of the called routine and it
824 may clobber it. If the fixed area has been used for previous
825 parameters, we must save and restore it. */
827 static rtx
828 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
830 int low;
831 int high;
833 /* Compute the boundary of the area that needs to be saved, if any. */
834 high = reg_parm_stack_space;
835 #ifdef ARGS_GROW_DOWNWARD
836 high += 1;
837 #endif
838 if (high > highest_outgoing_arg_in_use)
839 high = highest_outgoing_arg_in_use;
841 for (low = 0; low < high; low++)
842 if (stack_usage_map[low] != 0)
844 int num_to_save;
845 enum machine_mode save_mode;
846 int delta;
847 rtx stack_area;
848 rtx save_area;
 850       while (stack_usage_map[--high] == 0)
 851 	;
853 *low_to_save = low;
854 *high_to_save = high;
856 num_to_save = high - low + 1;
857 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
859 /* If we don't have the required alignment, must do this
860 in BLKmode. */
861 if ((low & (MIN (GET_MODE_SIZE (save_mode),
862 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
863 save_mode = BLKmode;
865 #ifdef ARGS_GROW_DOWNWARD
866 delta = -high;
867 #else
868 delta = low;
869 #endif
870 stack_area = gen_rtx_MEM (save_mode,
871 memory_address (save_mode,
872 plus_constant (argblock,
873 delta)));
875 set_mem_align (stack_area, PARM_BOUNDARY);
876 if (save_mode == BLKmode)
878 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
879 emit_block_move (validize_mem (save_area), stack_area,
880 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
882 else
884 save_area = gen_reg_rtx (save_mode);
885 emit_move_insn (save_area, stack_area);
888 return save_area;
891 return NULL_RTX;
894 static void
895 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
897 enum machine_mode save_mode = GET_MODE (save_area);
898 int delta;
899 rtx stack_area;
901 #ifdef ARGS_GROW_DOWNWARD
902 delta = -high_to_save;
903 #else
904 delta = low_to_save;
905 #endif
906 stack_area = gen_rtx_MEM (save_mode,
907 memory_address (save_mode,
908 plus_constant (argblock, delta)));
909 set_mem_align (stack_area, PARM_BOUNDARY);
911 if (save_mode != BLKmode)
912 emit_move_insn (stack_area, save_area);
913 else
914 emit_block_move (stack_area, validize_mem (save_area),
915 GEN_INT (high_to_save - low_to_save + 1),
916 BLOCK_OP_CALL_PARM);
918 #endif /* REG_PARM_STACK_SPACE */
 920 /* For any elements in ARGS that refer to parameters to be passed in
 921    registers, but not in memory, and whose alignment does not permit a
 922    direct copy into registers, copy the values into a group of pseudos
 923    which we will later copy into the appropriate hard registers.
925 Pseudos for each unaligned argument will be stored into the array
926 args[argnum].aligned_regs. The caller is responsible for deallocating
927 the aligned_regs array if it is nonzero. */
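/* Worked example, not part of the original source: for a 3-byte BLKmode
   argument on a 32-bit BYTES_BIG_ENDIAN target (with BLOCK_REG_PADDING not
   defined), bytes == 3, nregs == 1 and endian_correction == 32 - 3 * 8 == 8,
   so the bit-field store in the function below skips the 8 empty high-order
   bits described in its comment.  */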
929 static void
930 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
932 int i, j;
934 for (i = 0; i < num_actuals; i++)
935 if (args[i].reg != 0 && ! args[i].pass_on_stack
936 && args[i].mode == BLKmode
937 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
938 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
940 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
941 int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
942 int endian_correction = 0;
944 args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
945 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
947 /* Structures smaller than a word are normally aligned to the
948 least significant byte. On a BYTES_BIG_ENDIAN machine,
949 this means we must skip the empty high order bytes when
950 calculating the bit offset. */
951 if (bytes < UNITS_PER_WORD
952 #ifdef BLOCK_REG_PADDING
953 && (BLOCK_REG_PADDING (args[i].mode,
954 TREE_TYPE (args[i].tree_value), 1)
955 == downward)
956 #else
957 && BYTES_BIG_ENDIAN
958 #endif
960 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
962 for (j = 0; j < args[i].n_aligned_regs; j++)
964 rtx reg = gen_reg_rtx (word_mode);
965 rtx word = operand_subword_force (args[i].value, j, BLKmode);
966 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
968 args[i].aligned_regs[j] = reg;
969 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
970 word_mode, word_mode, BITS_PER_WORD);
972 /* There is no need to restrict this code to loading items
973 in TYPE_ALIGN sized hunks. The bitfield instructions can
974 load up entire word sized registers efficiently.
976 ??? This may not be needed anymore.
 977 	     We used to emit a clobber here but that doesn't let later
 978 	     passes optimize the instructions we emit.  By storing 0 into
 979 	     the register, later passes know the first AND to zero out the
980 bitfield being set in the register is unnecessary. The store
981 of 0 will be deleted as will at least the first AND. */
983 emit_move_insn (reg, const0_rtx);
985 bytes -= bitsize / BITS_PER_UNIT;
986 store_bit_field (reg, bitsize, endian_correction, word_mode,
987 word, BITS_PER_WORD);
992 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
993 ACTPARMS.
995 NUM_ACTUALS is the total number of parameters.
997 N_NAMED_ARGS is the total number of named arguments.
999 FNDECL is the tree code for the target of this call (if known)
1001 ARGS_SO_FAR holds state needed by the target to know where to place
1002 the next argument.
1004 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1005 for arguments which are passed in registers.
 1007    OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1008 and may be modified by this routine.
1010 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
 1011    flags which may be modified by this routine.  */
1013 static void
1014 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1015 struct arg_data *args,
1016 struct args_size *args_size,
1017 int n_named_args ATTRIBUTE_UNUSED,
1018 tree actparms, tree fndecl,
1019 CUMULATIVE_ARGS *args_so_far,
1020 int reg_parm_stack_space,
1021 rtx *old_stack_level, int *old_pending_adj,
1022 int *must_preallocate, int *ecf_flags)
1024 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1025 int inc;
1027 /* Count arg position in order args appear. */
1028 int argpos;
1030 int i;
1031 tree p;
1033 args_size->constant = 0;
1034 args_size->var = 0;
1036 /* In this loop, we consider args in the order they are written.
1037 We fill up ARGS from the front or from the back if necessary
1038 so that in any case the first arg to be pushed ends up at the front. */
1040 if (PUSH_ARGS_REVERSED)
1042 i = num_actuals - 1, inc = -1;
1043 /* In this case, must reverse order of args
1044 so that we compute and push the last arg first. */
1046 else
1048 i = 0, inc = 1;
1051 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1052 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1054 tree type = TREE_TYPE (TREE_VALUE (p));
1055 int unsignedp;
1056 enum machine_mode mode;
1058 args[i].tree_value = TREE_VALUE (p);
1060 /* Replace erroneous argument with constant zero. */
1061 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1062 args[i].tree_value = integer_zero_node, type = integer_type_node;
1064 /* If TYPE is a transparent union, pass things the way we would
1065 pass the first field of the union. We have already verified that
1066 the modes are the same. */
1067 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1068 type = TREE_TYPE (TYPE_FIELDS (type));
1070 /* Decide where to pass this arg.
1072 args[i].reg is nonzero if all or part is passed in registers.
1074 args[i].partial is nonzero if part but not all is passed in registers,
1075 and the exact value says how many words are passed in registers.
1077 args[i].pass_on_stack is nonzero if the argument must at least be
1078 computed on the stack. It may then be loaded back into registers
1079 if args[i].reg is nonzero.
1081 These decisions are driven by the FUNCTION_... macros and must agree
1082 with those made by function.c. */
1084 /* See if this argument should be passed by invisible reference. */
1085 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
1086 || TREE_ADDRESSABLE (type)
1087 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1088 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1089 type, argpos < n_named_args)
1090 #endif
1093 /* If we're compiling a thunk, pass through invisible
1094 references instead of making a copy. */
1095 if (current_function_is_thunk
1096 #ifdef FUNCTION_ARG_CALLEE_COPIES
1097 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1098 type, argpos < n_named_args)
1099 /* If it's in a register, we must make a copy of it too. */
1100 /* ??? Is this a sufficient test? Is there a better one? */
1101 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1102 && REG_P (DECL_RTL (args[i].tree_value)))
1103 && ! TREE_ADDRESSABLE (type))
1104 #endif
1107 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1108 new object from the argument. If we are passing by
1109 invisible reference, the callee will do that for us, so we
1110 can strip off the TARGET_EXPR. This is not always safe,
1111 but it is safe in the only case where this is a useful
1112 optimization; namely, when the argument is a plain object.
1113 In that case, the frontend is just asking the backend to
1114 make a bitwise copy of the argument. */
1116 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1117 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1118 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1119 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1121 args[i].tree_value = build1 (ADDR_EXPR,
1122 build_pointer_type (type),
1123 args[i].tree_value);
1124 type = build_pointer_type (type);
1126 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1128 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1129 We implement this by passing the address of the temporary
1130 rather than expanding it into another allocated slot. */
1131 args[i].tree_value = build1 (ADDR_EXPR,
1132 build_pointer_type (type),
1133 args[i].tree_value);
1134 type = build_pointer_type (type);
1136 else
1138 /* We make a copy of the object and pass the address to the
1139 function being called. */
1140 rtx copy;
1142 if (!COMPLETE_TYPE_P (type)
1143 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1144 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1145 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1146 STACK_CHECK_MAX_VAR_SIZE))))
1148 /* This is a variable-sized object. Make space on the stack
1149 for it. */
1150 rtx size_rtx = expr_size (TREE_VALUE (p));
1152 if (*old_stack_level == 0)
1154 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1155 *old_pending_adj = pending_stack_adjust;
1156 pending_stack_adjust = 0;
1159 copy = gen_rtx_MEM (BLKmode,
1160 allocate_dynamic_stack_space
1161 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1162 set_mem_attributes (copy, type, 1);
1164 else
1165 copy = assign_temp (type, 0, 1, 0);
1167 store_expr (args[i].tree_value, copy, 0);
1168 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1170 args[i].tree_value = build1 (ADDR_EXPR,
1171 build_pointer_type (type),
1172 make_tree (type, copy));
1173 type = build_pointer_type (type);
1177 mode = TYPE_MODE (type);
1178 unsignedp = TREE_UNSIGNED (type);
1180 #ifdef PROMOTE_FUNCTION_ARGS
1181 mode = promote_mode (type, mode, &unsignedp, 1);
1182 #endif
1184 args[i].unsignedp = unsignedp;
1185 args[i].mode = mode;
1187 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1188 argpos < n_named_args);
1189 #ifdef FUNCTION_INCOMING_ARG
1190 /* If this is a sibling call and the machine has register windows, the
 1191 	 register window has to be unwound before calling the routine, so
1192 arguments have to go into the incoming registers. */
1193 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1194 argpos < n_named_args);
1195 #else
1196 args[i].tail_call_reg = args[i].reg;
1197 #endif
1199 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1200 if (args[i].reg)
1201 args[i].partial
1202 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1203 argpos < n_named_args);
1204 #endif
1206 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1208 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1209 it means that we are to pass this arg in the register(s) designated
1210 by the PARALLEL, but also to pass it in the stack. */
1211 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1212 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1213 args[i].pass_on_stack = 1;
1215 /* If this is an addressable type, we must preallocate the stack
1216 since we must evaluate the object into its final location.
1218 If this is to be passed in both registers and the stack, it is simpler
1219 to preallocate. */
1220 if (TREE_ADDRESSABLE (type)
1221 || (args[i].pass_on_stack && args[i].reg != 0))
1222 *must_preallocate = 1;
1224 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1225 we cannot consider this function call constant. */
1226 if (TREE_ADDRESSABLE (type))
1227 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1229 /* Compute the stack-size of this argument. */
1230 if (args[i].reg == 0 || args[i].partial != 0
1231 || reg_parm_stack_space > 0
1232 || args[i].pass_on_stack)
1233 locate_and_pad_parm (mode, type,
 1234 #ifdef STACK_PARMS_IN_REG_PARM_AREA
 1235 			     1,
 1236 #else
1237 args[i].reg != 0,
1238 #endif
1239 args[i].pass_on_stack ? 0 : args[i].partial,
1240 fndecl, args_size, &args[i].locate);
1242 /* Update ARGS_SIZE, the total stack space for args so far. */
1244 args_size->constant += args[i].locate.size.constant;
1245 if (args[i].locate.size.var)
1246 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1248 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1249 have been used, etc. */
1251 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1252 argpos < n_named_args);
1256 /* Update ARGS_SIZE to contain the total size for the argument block.
1257 Return the original constant component of the argument block's size.
1259 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1260 for arguments passed in registers. */
1262 static int
1263 compute_argument_block_size (int reg_parm_stack_space,
1264 struct args_size *args_size,
1265 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1267 int unadjusted_args_size = args_size->constant;
1269 /* For accumulate outgoing args mode we don't need to align, since the frame
 1270      will already be aligned.  Align to STACK_BOUNDARY in order to prevent
1271 backends from generating misaligned frame sizes. */
1272 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1273 preferred_stack_boundary = STACK_BOUNDARY;
1275 /* Compute the actual size of the argument block required. The variable
1276 and constant sizes must be combined, the size may have to be rounded,
1277 and there may be a minimum required size. */
1279 if (args_size->var)
1281 args_size->var = ARGS_SIZE_TREE (*args_size);
1282 args_size->constant = 0;
1284 preferred_stack_boundary /= BITS_PER_UNIT;
1285 if (preferred_stack_boundary > 1)
1287 /* We don't handle this case yet. To handle it correctly we have
1288 to add the delta, round and subtract the delta.
1289 Currently no machine description requires this support. */
1290 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1291 abort ();
1292 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1295 if (reg_parm_stack_space > 0)
1297 args_size->var
1298 = size_binop (MAX_EXPR, args_size->var,
1299 ssize_int (reg_parm_stack_space));
1301 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1302 /* The area corresponding to register parameters is not to count in
1303 the size of the block we need. So make the adjustment. */
1304 args_size->var
1305 = size_binop (MINUS_EXPR, args_size->var,
1306 ssize_int (reg_parm_stack_space));
1307 #endif
1310 else
1312 preferred_stack_boundary /= BITS_PER_UNIT;
1313 if (preferred_stack_boundary < 1)
1314 preferred_stack_boundary = 1;
1315 args_size->constant = (((args_size->constant
1316 + stack_pointer_delta
1317 + preferred_stack_boundary - 1)
1318 / preferred_stack_boundary
1319 * preferred_stack_boundary)
1320 - stack_pointer_delta);
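      /* Worked example, not part of the original source: with
	 stack_pointer_delta == 4, a preferred_stack_boundary of 16 bytes and
	 an unrounded args_size->constant of 20, this yields
	 ((20 + 4 + 15) / 16) * 16 - 4 == 32 - 4 == 28, so the 4 bytes already
	 adjusted plus 28 bytes of arguments leave the stack 16-byte aligned
	 at the call.  */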
1322 args_size->constant = MAX (args_size->constant,
1323 reg_parm_stack_space);
1325 #ifdef MAYBE_REG_PARM_STACK_SPACE
1326 if (reg_parm_stack_space == 0)
1327 args_size->constant = 0;
1328 #endif
1330 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1331 args_size->constant -= reg_parm_stack_space;
1332 #endif
1334 return unadjusted_args_size;
1337 /* Precompute parameters as needed for a function call.
1339 FLAGS is mask of ECF_* constants.
1341 NUM_ACTUALS is the number of arguments.
1343 ARGS is an array containing information for each argument; this
1344 routine fills in the INITIAL_VALUE and VALUE fields for each
1345 precomputed argument. */
1347 static void
1348 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1350 int i;
1352 /* If this function call is cse'able, precompute all the parameters.
1353 Note that if the parameter is constructed into a temporary, this will
1354 cause an additional copy because the parameter will be constructed
1355 into a temporary location and then copied into the outgoing arguments.
1356 If a parameter contains a call to alloca and this function uses the
1357 stack, precompute the parameter. */
1359 /* If we preallocated the stack space, and some arguments must be passed
1360 on the stack, then we must precompute any parameter which contains a
1361 function call which will store arguments on the stack.
1362 Otherwise, evaluating the parameter may clobber previous parameters
 1363    which have already been stored into the stack.  (We have code to avoid
 1364    such a case by saving the outgoing stack arguments, but it results in
 1365    worse code.)
1367 for (i = 0; i < num_actuals; i++)
1368 if ((flags & ECF_LIBCALL_BLOCK)
1369 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1371 enum machine_mode mode;
1373 /* If this is an addressable type, we cannot pre-evaluate it. */
1374 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1375 abort ();
1377 args[i].value
1378 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1380 /* ANSI doesn't require a sequence point here,
1381 but PCC has one, so this will avoid some problems. */
1382 emit_queue ();
1384 args[i].initial_value = args[i].value
1385 = protect_from_queue (args[i].value, 0);
1387 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1388 if (mode != args[i].mode)
1390 args[i].value
1391 = convert_modes (args[i].mode, mode,
1392 args[i].value, args[i].unsignedp);
1393 #ifdef PROMOTE_FOR_CALL_ONLY
1394 /* CSE will replace this only if it contains args[i].value
1395 pseudo, so convert it down to the declared mode using
1396 a SUBREG. */
1397 if (GET_CODE (args[i].value) == REG
1398 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1400 args[i].initial_value
1401 = gen_lowpart_SUBREG (mode, args[i].value);
1402 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1403 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1404 args[i].unsignedp);
1406 #endif
1411 /* Given the current state of MUST_PREALLOCATE and information about
1412 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1413 compute and return the final value for MUST_PREALLOCATE. */
1415 static int
1416 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1418 /* See if we have or want to preallocate stack space.
1420 If we would have to push a partially-in-regs parm
1421 before other stack parms, preallocate stack space instead.
1423 If the size of some parm is not a multiple of the required stack
1424 alignment, we must preallocate.
1426 If the total size of arguments that would otherwise create a copy in
1427 a temporary (such as a CALL) is more than half the total argument list
1428 size, preallocation is faster.
1430 Another reason to preallocate is if we have a machine (like the m88k)
1431 where stack alignment is required to be maintained between every
1432 pair of insns, not just when the call is made. However, we assume here
1433 that such machines either do not have push insns (and hence preallocation
1434 would occur anyway) or the problem is taken care of with
1435 PUSH_ROUNDING. */
1437 if (! must_preallocate)
1439 int partial_seen = 0;
1440 int copy_to_evaluate_size = 0;
1441 int i;
1443 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1445 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1446 partial_seen = 1;
1447 else if (partial_seen && args[i].reg == 0)
1448 must_preallocate = 1;
1450 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1451 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1452 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1453 || TREE_CODE (args[i].tree_value) == COND_EXPR
1454 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1455 copy_to_evaluate_size
1456 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1459 if (copy_to_evaluate_size * 2 >= args_size->constant
1460 && args_size->constant > 0)
1461 must_preallocate = 1;
1463 return must_preallocate;
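/* Worked example, not part of the original source: if the argument block is
   args_size->constant == 32 bytes and the BLKmode arguments that would be
   built in temporaries (CALL_EXPRs, TARGET_EXPRs, ...) total
   copy_to_evaluate_size == 20 bytes, then 20 * 2 == 40 >= 32, so the
   heuristic above chooses to preallocate the whole block.  */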
1466 /* If we preallocated stack space, compute the address of each argument
1467 and store it into the ARGS array.
1469 We need not ensure it is a valid memory address here; it will be
1470 validized when it is used.
1472 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1474 static void
1475 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1477 if (argblock)
1479 rtx arg_reg = argblock;
1480 int i, arg_offset = 0;
1482 if (GET_CODE (argblock) == PLUS)
1483 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1485 for (i = 0; i < num_actuals; i++)
1487 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1488 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1489 rtx addr;
1491 /* Skip this parm if it will not be passed on the stack. */
1492 if (! args[i].pass_on_stack && args[i].reg != 0)
1493 continue;
1495 if (GET_CODE (offset) == CONST_INT)
1496 addr = plus_constant (arg_reg, INTVAL (offset));
1497 else
1498 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1500 addr = plus_constant (addr, arg_offset);
1501 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1502 set_mem_align (args[i].stack, PARM_BOUNDARY);
1503 set_mem_attributes (args[i].stack,
1504 TREE_TYPE (args[i].tree_value), 1);
1506 if (GET_CODE (slot_offset) == CONST_INT)
1507 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1508 else
1509 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1511 addr = plus_constant (addr, arg_offset);
1512 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1513 set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1514 set_mem_attributes (args[i].stack_slot,
1515 TREE_TYPE (args[i].tree_value), 1);
1517 /* Function incoming arguments may overlap with sibling call
1518 outgoing arguments and we cannot allow reordering of reads
1519 from function arguments with stores to outgoing arguments
1520 of sibling calls. */
1521 set_mem_alias_set (args[i].stack, 0);
1522 set_mem_alias_set (args[i].stack_slot, 0);
1527 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1528 in a call instruction.
1530 FNDECL is the tree node for the target function. For an indirect call
1531 FNDECL will be NULL_TREE.
1533 ADDR is the operand 0 of CALL_EXPR for this call. */
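/* Illustrative only, not part of the original source: for a direct call the
   result is the SYMBOL_REF taken from DECL_RTL (fndecl), e.g.
   (symbol_ref "foo"); for an indirect call the ADDR expression is expanded
   and the result is typically a pseudo register holding the function
   address.  */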
1535 static rtx
1536 rtx_for_function_call (tree fndecl, tree addr)
1538 rtx funexp;
1540 /* Get the function to call, in the form of RTL. */
1541 if (fndecl)
1543 /* If this is the first use of the function, see if we need to
1544 make an external definition for it. */
1545 if (! TREE_USED (fndecl))
1547 assemble_external (fndecl);
1548 TREE_USED (fndecl) = 1;
1551 /* Get a SYMBOL_REF rtx for the function address. */
1552 funexp = XEXP (DECL_RTL (fndecl), 0);
1554 else
1555 /* Generate an rtx (probably a pseudo-register) for the address. */
1557 push_temp_slots ();
1558 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1559 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1560 emit_queue ();
1562 return funexp;
1565 /* Do the register loads required for any wholly-register parms or any
1566 parms which are passed both on the stack and in a register. Their
1567 expressions were already evaluated.
1569 Mark all register-parms as living through the call, putting these USE
1570 insns in the CALL_INSN_FUNCTION_USAGE field.
 1572    When IS_SIBCALL, perform the check_sibcall_argument_overlap
1573 checking, setting *SIBCALL_FAILURE if appropriate. */
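/* Illustrative only, not part of the original source: on return,
   *CALL_FUSAGE is an EXPR_LIST chain of USE expressions, roughly

       (expr_list (use (reg 5))
          (expr_list (use (reg 3))
             ...))

   which emit_call_1 attaches to the call insn as CALL_INSN_FUNCTION_USAGE
   so later passes know these registers are read by the call.  */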
1575 static void
1576 load_register_parameters (struct arg_data *args, int num_actuals,
1577 rtx *call_fusage, int flags, int is_sibcall,
1578 int *sibcall_failure)
1580 int i, j;
1582 #ifdef LOAD_ARGS_REVERSED
1583 for (i = num_actuals - 1; i >= 0; i--)
1584 #else
1585 for (i = 0; i < num_actuals; i++)
1586 #endif
1588 rtx reg = ((flags & ECF_SIBCALL)
1589 ? args[i].tail_call_reg : args[i].reg);
1590 if (reg)
1592 int partial = args[i].partial;
1593 int nregs;
1594 int size = 0;
1595 rtx before_arg = get_last_insn ();
 1596 	  /* Set to non-negative if we must move a word at a time, even if just
 1597 	     one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
1598 we just use a normal move insn. This value can be zero if the
1599 argument is a zero size structure with no fields. */
1600 nregs = -1;
1601 if (partial)
1602 nregs = partial;
1603 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1605 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1606 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1608 else
1609 size = GET_MODE_SIZE (args[i].mode);
1611 /* Handle calls that pass values in multiple non-contiguous
1612 locations. The Irix 6 ABI has examples of this. */
1614 if (GET_CODE (reg) == PARALLEL)
1616 tree type = TREE_TYPE (args[i].tree_value);
1617 emit_group_load (reg, args[i].value, type,
1618 int_size_in_bytes (type));
1621 /* If simple case, just do move. If normal partial, store_one_arg
1622 has already loaded the register for us. In all other cases,
1623 load the register(s) from memory. */
1625 else if (nregs == -1
1626 #ifdef BLOCK_REG_PADDING
1627 && !(size < UNITS_PER_WORD
1628 && (args[i].locate.where_pad
1629 == (BYTES_BIG_ENDIAN ? upward : downward)))
1630 #endif
1632 emit_move_insn (reg, args[i].value);
1634 /* If we have pre-computed the values to put in the registers in
1635 the case of non-aligned structures, copy them in now. */
1637 else if (args[i].n_aligned_regs != 0)
1638 for (j = 0; j < args[i].n_aligned_regs; j++)
1639 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1640 args[i].aligned_regs[j]);
1642 else if (partial == 0 || args[i].pass_on_stack)
1644 rtx mem = validize_mem (args[i].value);
1646 #ifdef BLOCK_REG_PADDING
1647 /* Handle case where we have a value that needs shifting
 1648 	     up to the msb, e.g. a QImode value and we're padding
1649 upward on a BYTES_BIG_ENDIAN machine. */
1650 if (nregs == -1)
1652 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1653 rtx x;
1654 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1655 x = expand_binop (word_mode, ashl_optab, mem,
1656 GEN_INT (shift), ri, 1, OPTAB_WIDEN);
1657 if (x != ri)
1658 emit_move_insn (ri, x);
1661 /* Handle a BLKmode that needs shifting. */
1662 else if (nregs == 1 && size < UNITS_PER_WORD
1663 && args[i].locate.where_pad == downward)
1665 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1666 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1667 rtx x = gen_reg_rtx (word_mode);
1668 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1669 optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;
1671 emit_move_insn (x, tem);
1672 x = expand_binop (word_mode, dir, x, GEN_INT (shift),
1673 ri, 1, OPTAB_WIDEN);
1674 if (x != ri)
1675 emit_move_insn (ri, x);
1677 else
1678 #endif
1679 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1682 /* When a parameter is a block, and perhaps in other cases, it is
1683 possible that it did a load from an argument slot that was
1684 already clobbered. */
1685 if (is_sibcall
1686 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1687 *sibcall_failure = 1;
1689 /* Handle calls that pass values in multiple non-contiguous
1690 locations. The Irix 6 ABI has examples of this. */
1691 if (GET_CODE (reg) == PARALLEL)
1692 use_group_regs (call_fusage, reg);
1693 else if (nregs == -1)
1694 use_reg (call_fusage, reg);
1695 else
1696 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
 1701 /* Try to integrate the function.  See expand_inline_function for documentation
1702 about the parameters. */
1704 static rtx
1705 try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
1706 tree type, rtx structure_value_addr)
1708 rtx temp;
1709 rtx before_call;
1710 int i;
1711 rtx old_stack_level = 0;
1712 int reg_parm_stack_space = 0;
1714 #ifdef REG_PARM_STACK_SPACE
1715 #ifdef MAYBE_REG_PARM_STACK_SPACE
1716 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1717 #else
1718 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1719 #endif
1720 #endif
1722 before_call = get_last_insn ();
1724 timevar_push (TV_INTEGRATION);
1726 temp = expand_inline_function (fndecl, actparms, target,
1727 ignore, type,
1728 structure_value_addr);
1730 timevar_pop (TV_INTEGRATION);
1732 /* If inlining succeeded, return. */
1733 if (temp != (rtx) (size_t) - 1)
1735 if (ACCUMULATE_OUTGOING_ARGS)
1737 /* If the outgoing argument list must be preserved, push
1738 the stack before executing the inlined function if it
1739 makes any calls. */
1741 i = reg_parm_stack_space;
1742 if (i > highest_outgoing_arg_in_use)
1743 i = highest_outgoing_arg_in_use;
1744 while (--i >= 0 && stack_usage_map[i] == 0)
1747 if (stack_arg_under_construction || i >= 0)
1749 rtx first_insn
1750 = before_call ? NEXT_INSN (before_call) : get_insns ();
1751 rtx insn = NULL_RTX, seq;
1753 /* Look for a call in the inline function code.
1754 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1755 nonzero then there is a call and it is not necessary
1756 to scan the insns. */
1758 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1759 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1760 if (GET_CODE (insn) == CALL_INSN)
1761 break;
1763 if (insn)
1765 /* Reserve enough stack space so that the largest
1766 argument list of any function call in the inline
1767 function does not overlap the argument list being
1768 evaluated. This is usually an overestimate because
1769 allocate_dynamic_stack_space reserves space for an
1770 outgoing argument list in addition to the requested
1771 space, but there is no way to ask for stack space such
1772 that an argument list of a certain length can be
1773 safely constructed.
1775 Add the stack space reserved for register arguments, if
1776 any, in the inline function. What is really needed is the
1777 largest value of reg_parm_stack_space in the inline
1778 function, but that is not available. Using the current
1779 value of reg_parm_stack_space is wrong, but gives
1780 correct results on all supported machines. */
1782 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1783 + reg_parm_stack_space);
1785 start_sequence ();
1786 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1787 allocate_dynamic_stack_space (GEN_INT (adjust),
1788 NULL_RTX, BITS_PER_UNIT);
1789 seq = get_insns ();
1790 end_sequence ();
1791 emit_insn_before (seq, first_insn);
1792 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1797 /* If the result is equivalent to TARGET, return TARGET to simplify
1798 checks in store_expr. They can be equivalent but not equal in the
1799 case of a function that returns BLKmode. */
1800 if (temp != target && rtx_equal_p (temp, target))
1801 return target;
1802 return temp;
1805 /* If inlining failed, mark FNDECL as needing to be compiled
1806 separately after all. If function was declared inline,
1807 give a warning. */
1808 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1809 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1811 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1812 warning ("called from here");
1814 (*lang_hooks.mark_addressable) (fndecl);
1815 return (rtx) (size_t) - 1;
1818 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1819 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1820 bytes, then we would need to push some additional bytes to pad the
1821 arguments. So, we compute an adjustment to the stack pointer for an
1822 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1823 bytes. Then, when the arguments are pushed the stack will be perfectly
1824 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1825 be popped after the call. Returns the adjustment. */
1827 static int
1828 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1829 struct args_size *args_size,
1830 int preferred_unit_stack_boundary)
1832 /* The number of bytes to pop so that the stack will be
1833 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1834 HOST_WIDE_INT adjustment;
1835 /* The alignment of the stack after the arguments are pushed, if we
1836 just pushed the arguments without adjusting the stack here. */
1837 HOST_WIDE_INT unadjusted_alignment;
1839 unadjusted_alignment
1840 = ((stack_pointer_delta + unadjusted_args_size)
1841 % preferred_unit_stack_boundary);
1843 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1844 as possible -- leaving just enough left to cancel out the
1845 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1846 PENDING_STACK_ADJUST is non-negative, and congruent to
1847 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1849 /* Begin by trying to pop all the bytes. */
1850 unadjusted_alignment
1851 = (unadjusted_alignment
1852 - (pending_stack_adjust % preferred_unit_stack_boundary));
1853 adjustment = pending_stack_adjust;
1854 /* Push enough additional bytes that the stack will be aligned
1855 after the arguments are pushed. */
1856 if (preferred_unit_stack_boundary > 1)
1858 if (unadjusted_alignment > 0)
1859 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1860 else
1861 adjustment += unadjusted_alignment;
1864 /* Now set ARGS_SIZE->CONSTANT so that we pop the right number of
1865 bytes after the call. The right number is the entire
1866 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1867 by the arguments in the first place. */
1868 args_size->constant
1869 = pending_stack_adjust - adjustment + unadjusted_args_size;
1871 return adjustment;
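/* A minimal illustrative sketch (not part of GCC) of the computation above,
   with hypothetical numbers: a 16-byte boundary, an initially aligned stack
   (stack_pointer_delta of 0), 20 bytes of arguments and 28 bytes of pending
   adjustment.  Only 20 of the pending bytes are popped now, so pushing the
   20 argument bytes restores 16-byte alignment, and args_size->constant ends
   up as the 28 bytes to pop after the call.  All names below are made up for
   the example.  */
#if 0
#include <assert.h>

static int
sketch_combine_adjustment (int stack_pointer_delta, int unadjusted_args_size,
                           int pending_stack_adjust, int boundary,
                           int *pop_after_call)
{
  int misalign = (stack_pointer_delta + unadjusted_args_size) % boundary;
  int adjustment = pending_stack_adjust;

  misalign -= pending_stack_adjust % boundary;
  if (boundary > 1)
    {
      if (misalign > 0)
        adjustment -= boundary - misalign;
      else
        adjustment += misalign;
    }
  *pop_after_call = pending_stack_adjust - adjustment + unadjusted_args_size;
  return adjustment;
}

static void
sketch_combine_adjustment_example (void)
{
  int pop_after_call;
  int adj = sketch_combine_adjustment (0, 20, 28, 16, &pop_after_call);

  assert (adj == 20);                  /* pop 20 bytes before pushing args */
  assert (pop_after_call == 28);       /* pop the rest after the call */
  assert ((0 - adj + 20) % 16 == 0);   /* aligned once the args are pushed */
}
#endif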
1874 /* Scan expression X to see whether it dereferences any argument slots
1875 we have already clobbered with tail call arguments (as noted in the
1876 stored_args_map bitmap).
1877 Return nonzero if X dereferences such an argument slot,
1878 zero otherwise. */
1880 static int
1881 check_sibcall_argument_overlap_1 (rtx x)
1883 RTX_CODE code;
1884 int i, j;
1885 unsigned int k;
1886 const char *fmt;
1888 if (x == NULL_RTX)
1889 return 0;
1891 code = GET_CODE (x);
1893 if (code == MEM)
1895 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1896 i = 0;
1897 else if (GET_CODE (XEXP (x, 0)) == PLUS
1898 && XEXP (XEXP (x, 0), 0) ==
1899 current_function_internal_arg_pointer
1900 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1901 i = INTVAL (XEXP (XEXP (x, 0), 1));
1902 else
1903 return 0;
1905 #ifdef ARGS_GROW_DOWNWARD
1906 i = -i - GET_MODE_SIZE (GET_MODE (x));
1907 #endif
1909 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1910 if (i + k < stored_args_map->n_bits
1911 && TEST_BIT (stored_args_map, i + k))
1912 return 1;
1914 return 0;
1917 /* Scan all subexpressions. */
1918 fmt = GET_RTX_FORMAT (code);
1919 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1921 if (*fmt == 'e')
1923 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1924 return 1;
1926 else if (*fmt == 'E')
1928 for (j = 0; j < XVECLEN (x, i); j++)
1929 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1930 return 1;
1933 return 0;
1936 /* Scan the sequence after INSN to see whether it dereferences any argument
1937 slots we have already clobbered with tail call arguments (as noted in the
1938 stored_args_map bitmap). If MARK_STORED_ARGS_MAP is nonzero, add ARG's stack
1939 slots to the stored_args_map bitmap afterwards (when ARG is a register,
1940 MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence after INSN
1941 dereferences such an argument slot, zero otherwise. */
1943 static int
1944 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1946 int low, high;
1948 if (insn == NULL_RTX)
1949 insn = get_insns ();
1950 else
1951 insn = NEXT_INSN (insn);
1953 for (; insn; insn = NEXT_INSN (insn))
1954 if (INSN_P (insn)
1955 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1956 break;
1958 if (mark_stored_args_map)
1960 #ifdef ARGS_GROW_DOWNWARD
1961 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1962 #else
1963 low = arg->locate.slot_offset.constant;
1964 #endif
1966 for (high = low + arg->locate.size.constant; low < high; low++)
1967 SET_BIT (stored_args_map, low);
1969 return insn != NULL_RTX;
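/* A minimal illustrative sketch (not part of GCC) of the stored_args_map
   bookkeeping used by the two functions above, with a plain byte array
   instead of an sbitmap and hypothetical offsets and sizes.  Storing an
   outgoing argument marks the bytes of its slot; a later argument that loads
   from any marked byte overlaps an already clobbered slot, so the sibling
   call must be abandoned.  All names below are made up for the example.  */
#if 0
#include <assert.h>
#include <string.h>

#define SKETCH_MAP_BYTES 64

static unsigned char sketch_stored_args_map[SKETCH_MAP_BYTES];

static void
sketch_mark_stored (int offset, int size)
{
  int i;
  for (i = offset; i < offset + size; i++)
    sketch_stored_args_map[i] = 1;
}

static int
sketch_load_overlaps (int offset, int size)
{
  int i;
  for (i = offset; i < offset + size; i++)
    if (i < SKETCH_MAP_BYTES && sketch_stored_args_map[i])
      return 1;
  return 0;
}

static void
sketch_overlap_example (void)
{
  memset (sketch_stored_args_map, 0, sizeof sketch_stored_args_map);
  sketch_mark_stored (0, 8);               /* first argument stored at bytes 0..7 */
  assert (sketch_load_overlaps (4, 4));     /* later load of bytes 4..7: overlap */
  assert (!sketch_load_overlaps (8, 8));    /* load of bytes 8..15: no overlap */
}
#endif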
1972 static tree
1973 fix_unsafe_tree (tree t)
1975 switch (unsafe_for_reeval (t))
1977 case 0: /* Safe. */
1978 break;
1980 case 1: /* Mildly unsafe. */
1981 t = unsave_expr (t);
1982 break;
1984 case 2: /* Wildly unsafe. */
1986 tree var = build_decl (VAR_DECL, NULL_TREE,
1987 TREE_TYPE (t));
1988 SET_DECL_RTL (var,
1989 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
1990 t = var;
1992 break;
1994 default:
1995 abort ();
1997 return t;
2000 /* Generate all the code for a function call
2001 and return an rtx for its value.
2002 Store the value in TARGET (specified as an rtx) if convenient.
2003 If the value is stored in TARGET then TARGET is returned.
2004 If IGNORE is nonzero, then we ignore the value of the function call. */
2007 expand_call (tree exp, rtx target, int ignore)
2009 /* Nonzero if we are currently expanding a call. */
2010 static int currently_expanding_call = 0;
2012 /* List of actual parameters. */
2013 tree actparms = TREE_OPERAND (exp, 1);
2014 /* RTX for the function to be called. */
2015 rtx funexp;
2016 /* Sequence of insns to perform a tail recursive "call". */
2017 rtx tail_recursion_insns = NULL_RTX;
2018 /* Sequence of insns to perform a normal "call". */
2019 rtx normal_call_insns = NULL_RTX;
2020 /* Sequence of insns to perform a tail "call" (sibling call). */
2021 rtx tail_call_insns = NULL_RTX;
2022 /* Data type of the function. */
2023 tree funtype;
2024 tree type_arg_types;
2025 /* Declaration of the function being called,
2026 or 0 if the function is computed (not known by name). */
2027 tree fndecl = 0;
2028 rtx insn;
2029 int try_tail_call = 1;
2030 int try_tail_recursion = 1;
2031 int pass;
2033 /* Register in which non-BLKmode value will be returned,
2034 or 0 if no value or if value is BLKmode. */
2035 rtx valreg;
2036 /* Address where we should return a BLKmode value;
2037 0 if value not BLKmode. */
2038 rtx structure_value_addr = 0;
2039 /* Nonzero if that address is being passed by treating it as
2040 an extra, implicit first parameter. Otherwise,
2041 it is passed by being copied directly into struct_value_rtx. */
2042 int structure_value_addr_parm = 0;
2043 /* Size of aggregate value wanted, or zero if none wanted
2044 or if we are using the non-reentrant PCC calling convention
2045 or expecting the value in registers. */
2046 HOST_WIDE_INT struct_value_size = 0;
2047 /* Nonzero if called function returns an aggregate in memory PCC style,
2048 by returning the address of where to find it. */
2049 int pcc_struct_value = 0;
2051 /* Number of actual parameters in this call, including struct value addr. */
2052 int num_actuals;
2053 /* Number of named args. Args after this are anonymous ones
2054 and they must all go on the stack. */
2055 int n_named_args;
2057 /* Vector of information about each argument.
2058 Arguments are numbered in the order they will be pushed,
2059 not the order they are written. */
2060 struct arg_data *args;
2062 /* Total size in bytes of all the stack-parms scanned so far. */
2063 struct args_size args_size;
2064 struct args_size adjusted_args_size;
2065 /* Size of arguments before any adjustments (such as rounding). */
2066 int unadjusted_args_size;
2067 /* Data on reg parms scanned so far. */
2068 CUMULATIVE_ARGS args_so_far;
2069 /* Nonzero if a reg parm has been scanned. */
2070 int reg_parm_seen;
2071 /* Nonzero if this is an indirect function call. */
2073 /* Nonzero if we must avoid push-insns in the args for this call.
2074 If stack space is allocated for register parameters, but not by the
2075 caller, then it is preallocated in the fixed part of the stack frame.
2076 So the entire argument block must then be preallocated (i.e., we
2077 ignore PUSH_ROUNDING in that case). */
2079 int must_preallocate = !PUSH_ARGS;
2081 /* Size of the stack reserved for parameter registers. */
2082 int reg_parm_stack_space = 0;
2084 /* Address of space preallocated for stack parms
2085 (on machines that lack push insns), or 0 if space not preallocated. */
2086 rtx argblock = 0;
2088 /* Mask of ECF_ flags. */
2089 int flags = 0;
2090 /* Nonzero if this is a call to an inline function. */
2091 int is_integrable = 0;
2092 #ifdef REG_PARM_STACK_SPACE
2093 /* Define the boundary of the register parm stack space that needs to be
2094 saved, if any. */
2095 int low_to_save, high_to_save;
2096 rtx save_area = 0; /* Place that it is saved */
2097 #endif
2099 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2100 char *initial_stack_usage_map = stack_usage_map;
2102 int old_stack_allocated;
2104 /* State variables to track stack modifications. */
2105 rtx old_stack_level = 0;
2106 int old_stack_arg_under_construction = 0;
2107 int old_pending_adj = 0;
2108 int old_inhibit_defer_pop = inhibit_defer_pop;
2110 /* Some stack pointer alterations we make are performed via
2111 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2112 which we then also need to save/restore along the way. */
2113 int old_stack_pointer_delta = 0;
2115 rtx call_fusage;
2116 tree p = TREE_OPERAND (exp, 0);
2117 tree addr = TREE_OPERAND (exp, 0);
2118 int i;
2119 /* The alignment of the stack, in bits. */
2120 HOST_WIDE_INT preferred_stack_boundary;
2121 /* The alignment of the stack, in bytes. */
2122 HOST_WIDE_INT preferred_unit_stack_boundary;
2124 /* See if this is a "nothrow" function call. */
2125 if (TREE_NOTHROW (exp))
2126 flags |= ECF_NOTHROW;
2128 /* See if we can find a DECL-node for the actual function.
2129 As a result, decide whether this is a call to an integrable function. */
2131 fndecl = get_callee_fndecl (exp);
2132 if (fndecl)
2134 if (!flag_no_inline
2135 && fndecl != current_function_decl
2136 && DECL_INLINE (fndecl)
2137 && DECL_SAVED_INSNS (fndecl)
2138 && DECL_SAVED_INSNS (fndecl)->inlinable)
2139 is_integrable = 1;
2140 else if (! TREE_ADDRESSABLE (fndecl))
2142 /* In case this function later becomes inlinable,
2143 record that there was already a non-inline call to it.
2145 Use abstraction instead of setting TREE_ADDRESSABLE
2146 directly. */
2147 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2148 && optimize > 0)
2150 warning_with_decl (fndecl, "can't inline call to `%s'");
2151 warning ("called from here");
2153 (*lang_hooks.mark_addressable) (fndecl);
2156 flags |= flags_from_decl_or_type (fndecl);
2159 /* If we don't have a specific function to call, see if we have any
2160 attributes set in the type. */
2161 else
2162 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2164 /* Warn if this value is an aggregate type,
2165 regardless of which calling convention we are using for it. */
2166 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2167 warning ("function call has aggregate value");
2169 /* If the result of a pure or const function call is ignored (or void),
2170 and none of its arguments are volatile, we can avoid expanding the
2171 call and just evaluate the arguments for side-effects. */
2172 if ((flags & (ECF_CONST | ECF_PURE))
2173 && (ignore || target == const0_rtx
2174 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2176 bool volatilep = false;
2177 tree arg;
2179 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2180 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2182 volatilep = true;
2183 break;
2186 if (! volatilep)
2188 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2189 expand_expr (TREE_VALUE (arg), const0_rtx,
2190 VOIDmode, EXPAND_NORMAL);
2191 return const0_rtx;
2195 #ifdef REG_PARM_STACK_SPACE
2196 #ifdef MAYBE_REG_PARM_STACK_SPACE
2197 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2198 #else
2199 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2200 #endif
2201 #endif
2203 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2204 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2205 must_preallocate = 1;
2206 #endif
2208 /* Set up a place to return a structure. */
2210 /* Cater to broken compilers. */
2211 if (aggregate_value_p (exp))
2213 /* This call returns a big structure. */
2214 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2216 #ifdef PCC_STATIC_STRUCT_RETURN
2218 pcc_struct_value = 1;
2219 /* Easier than making that case work right. */
2220 if (is_integrable)
2222 /* In case this is a static function, note that it has been
2223 used. */
2224 if (! TREE_ADDRESSABLE (fndecl))
2225 (*lang_hooks.mark_addressable) (fndecl);
2226 is_integrable = 0;
2229 #else /* not PCC_STATIC_STRUCT_RETURN */
2231 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2233 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2235 /* The structure value address arg is already in actparms.
2236 Pull it out. It might be nice to just leave it there, but
2237 we need to set structure_value_addr. */
2238 tree return_arg = TREE_VALUE (actparms);
2239 actparms = TREE_CHAIN (actparms);
2240 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2241 VOIDmode, EXPAND_NORMAL);
2243 else if (target && GET_CODE (target) == MEM)
2244 structure_value_addr = XEXP (target, 0);
2245 else
2247 /* For variable-sized objects, we must be called with a target
2248 specified. If we were to allocate space on the stack here,
2249 we would have no way of knowing when to free it. */
2250 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2252 mark_temp_addr_taken (d);
2253 structure_value_addr = XEXP (d, 0);
2254 target = 0;
2257 #endif /* not PCC_STATIC_STRUCT_RETURN */
2260 /* If called function is inline, try to integrate it. */
2262 if (is_integrable)
2264 rtx temp = try_to_integrate (fndecl, actparms, target,
2265 ignore, TREE_TYPE (exp),
2266 structure_value_addr);
2267 if (temp != (rtx) (size_t) - 1)
2268 return temp;
2271 /* Figure out the amount to which the stack should be aligned. */
2272 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2273 if (fndecl)
2275 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2276 if (i && i->preferred_incoming_stack_boundary)
2277 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2280 /* Operand 0 is a pointer-to-function; get the type of the function. */
2281 funtype = TREE_TYPE (addr);
2282 if (! POINTER_TYPE_P (funtype))
2283 abort ();
2284 funtype = TREE_TYPE (funtype);
2286 /* Munge the tree to split complex arguments into their imaginary
2287 and real parts. */
2288 if (SPLIT_COMPLEX_ARGS)
2290 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2291 actparms = split_complex_values (actparms);
2293 else
2294 type_arg_types = TYPE_ARG_TYPES (funtype);
2296 /* See if this is a call to a function that can return more than once
2297 or a call to longjmp or malloc. */
2298 flags |= special_function_p (fndecl, flags);
2300 if (flags & ECF_MAY_BE_ALLOCA)
2301 current_function_calls_alloca = 1;
2303 /* If struct_value_rtx is 0, it means pass the address
2304 as if it were an extra parameter. */
2305 if (structure_value_addr && struct_value_rtx == 0)
2307 /* If structure_value_addr is a REG other than
2308 virtual_outgoing_args_rtx, we can always use it. If it
2309 is not a REG, we must always copy it into a register.
2310 If it is virtual_outgoing_args_rtx, we must copy it to another
2311 register in some cases. */
2312 rtx temp = (GET_CODE (structure_value_addr) != REG
2313 || (ACCUMULATE_OUTGOING_ARGS
2314 && stack_arg_under_construction
2315 && structure_value_addr == virtual_outgoing_args_rtx)
2316 ? copy_addr_to_reg (structure_value_addr)
2317 : structure_value_addr);
2319 actparms
2320 = tree_cons (error_mark_node,
2321 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2322 temp),
2323 actparms);
2324 structure_value_addr_parm = 1;
2327 /* Count the arguments and set NUM_ACTUALS. */
2328 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2329 num_actuals++;
2331 /* Compute number of named args.
2332 Normally, don't include the last named arg if anonymous args follow.
2333 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2334 (If no anonymous args follow, the result of list_length is actually
2335 one too large. This is harmless.)
2337 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2338 zero, this machine will be able to place unnamed args that were
2339 passed in registers into the stack. So treat all args as named.
2340 This allows the insns emitted for a specific argument list to be
2341 independent of the function declaration.
2343 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2344 reliable way to pass unnamed args in registers, so we must force
2345 them into memory. */
2347 if ((STRICT_ARGUMENT_NAMING
2348 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2349 && type_arg_types != 0)
2350 n_named_args
2351 = (list_length (type_arg_types)
2352 /* Don't include the last named arg. */
2353 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2354 /* Count the struct value address, if it is passed as a parm. */
2355 + structure_value_addr_parm);
2356 else
2357 /* If we know nothing, treat all args as named. */
2358 n_named_args = num_actuals;
2360 /* Start updating where the next arg would go.
2362 On some machines (such as the PA) indirect calls have a different
2363 calling convention than normal calls. The last argument in
2364 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2365 or not. */
2366 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
2368 /* Make a vector to hold all the information about each arg. */
2369 args = alloca (num_actuals * sizeof (struct arg_data));
2370 memset (args, 0, num_actuals * sizeof (struct arg_data));
2372 /* Build up entries in the ARGS array, compute the size of the
2373 arguments into ARGS_SIZE, etc. */
2374 initialize_argument_information (num_actuals, args, &args_size,
2375 n_named_args, actparms, fndecl,
2376 &args_so_far, reg_parm_stack_space,
2377 &old_stack_level, &old_pending_adj,
2378 &must_preallocate, &flags);
2380 if (args_size.var)
2382 /* If this function requires a variable-sized argument list, don't
2383 try to make a cse'able block for this call. We may be able to
2384 do this eventually, but it is too complicated to keep track of
2385 what insns go in the cse'able block and which don't. */
2387 flags &= ~ECF_LIBCALL_BLOCK;
2388 must_preallocate = 1;
2391 /* Now make final decision about preallocating stack space. */
2392 must_preallocate = finalize_must_preallocate (must_preallocate,
2393 num_actuals, args,
2394 &args_size);
2396 /* If the structure value address will reference the stack pointer, we
2397 must stabilize it. We don't need to do this if we know that we are
2398 not going to adjust the stack pointer in processing this call. */
2400 if (structure_value_addr
2401 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2402 || reg_mentioned_p (virtual_outgoing_args_rtx,
2403 structure_value_addr))
2404 && (args_size.var
2405 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2406 structure_value_addr = copy_to_reg (structure_value_addr);
2408 /* Tail calls can make things harder to debug, and we've traditionally
2409 pushed these optimizations into -O2. Don't try if we're already
2410 expanding a call, as that means we're an argument. Don't try if
2411 there are cleanups, as we know there's code to follow the call.
2413 If rtx_equal_function_value_matters is false, that means we've
2414 finished with regular parsing, which means that some of the
2415 machinery we use to generate tail-calls is no longer in place.
2416 This is most often true of sjlj-exceptions, which we couldn't
2417 tail-call to anyway. */
2419 if (currently_expanding_call++ != 0
2420 || !flag_optimize_sibling_calls
2421 || !rtx_equal_function_value_matters
2422 || any_pending_cleanups ()
2423 || args_size.var)
2424 try_tail_call = try_tail_recursion = 0;
2426 /* Tail recursion fails when we are not dealing with a recursive call. */
2427 if (!try_tail_recursion
2428 || TREE_CODE (addr) != ADDR_EXPR
2429 || TREE_OPERAND (addr, 0) != current_function_decl)
2430 try_tail_recursion = 0;
2432 /* Other reasons for tail call optimization to fail. */
2433 if (
2434 #ifdef HAVE_sibcall_epilogue
2435 !HAVE_sibcall_epilogue
2436 #else
2438 #endif
2439 || !try_tail_call
2440 /* Doing sibling call optimization needs some work, since
2441 structure_value_addr can be allocated on the stack.
2442 It does not seem worth the effort since few optimizable
2443 sibling calls will return a structure. */
2444 || structure_value_addr != NULL_RTX
2445 /* Check whether the target is able to optimize the call
2446 into a sibcall. */
2447 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2448 /* Functions that do not return exactly once may not be sibcall
2449 optimized. */
2450 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2451 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2452 /* If the called function is nested in the current one, it might access
2453 some of the caller's arguments, but could clobber them beforehand if
2454 the argument areas are shared. */
2455 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2456 /* If this function requires more stack slots than the current
2457 function, we cannot change it into a sibling call. */
2458 || args_size.constant > current_function_args_size
2459 /* If the callee pops its own arguments, then it must pop exactly
2460 the same number of arguments as the current function. */
2461 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2462 != RETURN_POPS_ARGS (current_function_decl,
2463 TREE_TYPE (current_function_decl),
2464 current_function_args_size))
2465 || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
2466 try_tail_call = 0;
2468 if (try_tail_call || try_tail_recursion)
2470 int end, inc;
2471 actparms = NULL_TREE;
2472 /* Ok, we're going to give the tail call the old college try.
2473 This means we're going to evaluate the function arguments
2474 up to three times. There are two degrees of badness we can
2475 encounter, those that can be unsaved and those that can't.
2476 (See unsafe_for_reeval commentary for details.)
2478 Generate a new argument list. Pass safe arguments through
2479 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2480 For hard badness, evaluate them now and put their resulting
2481 rtx in a temporary VAR_DECL.
2483 initialize_argument_information has ordered the array for the
2484 order to be pushed, and we must remember this when reconstructing
2485 the original argument order. */
2487 if (PUSH_ARGS_REVERSED)
2489 inc = 1;
2490 i = 0;
2491 end = num_actuals;
2493 else
2495 inc = -1;
2496 i = num_actuals - 1;
2497 end = -1;
2500 for (; i != end; i += inc)
2502 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2503 /* We need to build actparms for optimize_tail_recursion. We can
2504 safely discard TREE_PURPOSE, since it is unused by this
2505 function. */
2506 if (try_tail_recursion)
2507 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2509 /* Do the same for the function address if it is an expression. */
2510 if (!fndecl)
2511 addr = fix_unsafe_tree (addr);
2512 /* Expanding one of those dangerous arguments could have added
2513 cleanups, but otherwise give it a whirl. */
2514 if (any_pending_cleanups ())
2515 try_tail_call = try_tail_recursion = 0;
2518 /* Generate a tail recursion sequence when calling ourselves. */
2520 if (try_tail_recursion)
2522 /* We want to emit any pending stack adjustments before the tail
2523 recursion "call". That way we know any adjustment after the tail
2524 recursion call can be ignored if we indeed use the tail recursion
2525 call expansion. */
2526 int save_pending_stack_adjust = pending_stack_adjust;
2527 int save_stack_pointer_delta = stack_pointer_delta;
2529 /* Emit any queued insns now; otherwise they would end up in
2530 only one of the alternates. */
2531 emit_queue ();
2533 /* Use a new sequence to hold any RTL we generate. We do not even
2534 know if we will use this RTL yet. The final decision can not be
2535 made until after RTL generation for the entire function is
2536 complete. */
2537 start_sequence ();
2538 /* If expanding any of the arguments creates cleanups, we can't
2539 do a tailcall. So, we'll need to pop the pending cleanups
2540 list. If, however, all goes well, and there are no cleanups
2541 then the call to expand_start_target_temps will have no
2542 effect. */
2543 expand_start_target_temps ();
2544 if (optimize_tail_recursion (actparms, get_last_insn ()))
2546 if (any_pending_cleanups ())
2547 try_tail_call = try_tail_recursion = 0;
2548 else
2549 tail_recursion_insns = get_insns ();
2551 expand_end_target_temps ();
2552 end_sequence ();
2554 /* Restore the original pending stack adjustment for the sibling and
2555 normal call cases below. */
2556 pending_stack_adjust = save_pending_stack_adjust;
2557 stack_pointer_delta = save_stack_pointer_delta;
2560 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2562 /* A fork duplicates the profile information, and an exec discards
2563 it. We can't rely on fork/exec to be paired. So write out the
2564 profile information we have gathered so far, and clear it. */
2565 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2566 is subject to race conditions, just as with multithreaded
2567 programs. */
2569 emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
2572 /* Ensure current function's preferred stack boundary is at least
2573 what we need. We don't have to increase alignment for recursive
2574 functions. */
2575 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2576 && fndecl != current_function_decl)
2577 cfun->preferred_stack_boundary = preferred_stack_boundary;
2578 if (fndecl == current_function_decl)
2579 cfun->recursive_call_emit = true;
2581 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2583 function_call_count++;
2585 /* We want to make two insn chains; one for a sibling call, the other
2586 for a normal call. We will select one of the two chains after
2587 initial RTL generation is complete. */
2588 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2590 int sibcall_failure = 0;
2591 /* We want to emit any pending stack adjustments before the tail
2592 recursion "call". That way we know any adjustment after the tail
2593 recursion call can be ignored if we indeed use the tail recursion
2594 call expansion. */
2595 int save_pending_stack_adjust = 0;
2596 int save_stack_pointer_delta = 0;
2597 rtx insns;
2598 rtx before_call, next_arg_reg;
2600 if (pass == 0)
2602 /* Emit any queued insns now; otherwise they would end up in
2603 only one of the alternates. */
2604 emit_queue ();
2606 /* State variables we need to save and restore between
2607 iterations. */
2608 save_pending_stack_adjust = pending_stack_adjust;
2609 save_stack_pointer_delta = stack_pointer_delta;
2611 if (pass)
2612 flags &= ~ECF_SIBCALL;
2613 else
2614 flags |= ECF_SIBCALL;
2616 /* Other state variables that we must reinitialize each time
2617 through the loop (that are not initialized by the loop itself). */
2618 argblock = 0;
2619 call_fusage = 0;
2621 /* Start a new sequence for the normal call case.
2623 From this point on, if the sibling call fails, we want to set
2624 sibcall_failure instead of continuing the loop. */
2625 start_sequence ();
2627 if (pass == 0)
2629 /* We know at this point that there are not currently any
2630 pending cleanups. If, however, in the process of evaluating
2631 the arguments we were to create some, we'll need to be
2632 able to get rid of them. */
2633 expand_start_target_temps ();
2636 /* Don't let pending stack adjusts add up to too much.
2637 Also, do all pending adjustments now if there is any chance
2638 this might be a call to alloca or if we are expanding a sibling
2639 call sequence or if we are calling a function that is to return
2640 with stack pointer depressed. */
2641 if (pending_stack_adjust >= 32
2642 || (pending_stack_adjust > 0
2643 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2644 || pass == 0)
2645 do_pending_stack_adjust ();
2647 /* When calling a const function, we must pop the stack args right away,
2648 so that the pop is deleted or moved with the call. */
2649 if (pass && (flags & ECF_LIBCALL_BLOCK))
2650 NO_DEFER_POP;
2652 #ifdef FINAL_REG_PARM_STACK_SPACE
2653 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2654 args_size.var);
2655 #endif
2656 /* Precompute any arguments as needed. */
2657 if (pass)
2658 precompute_arguments (flags, num_actuals, args);
2660 /* Now we are about to start emitting insns that can be deleted
2661 if a libcall is deleted. */
2662 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2663 start_sequence ();
2665 adjusted_args_size = args_size;
2666 /* Compute the actual size of the argument block required. The variable
2667 and constant sizes must be combined, the size may have to be rounded,
2668 and there may be a minimum required size. When generating a sibcall
2669 pattern, do not round up, since we'll be re-using whatever space our
2670 caller provided. */
2671 unadjusted_args_size
2672 = compute_argument_block_size (reg_parm_stack_space,
2673 &adjusted_args_size,
2674 (pass == 0 ? 0
2675 : preferred_stack_boundary));
2677 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2679 /* The argument block when performing a sibling call is the
2680 incoming argument block. */
2681 if (pass == 0)
2683 argblock = virtual_incoming_args_rtx;
2684 argblock
2685 #ifdef STACK_GROWS_DOWNWARD
2686 = plus_constant (argblock, current_function_pretend_args_size);
2687 #else
2688 = plus_constant (argblock, -current_function_pretend_args_size);
2689 #endif
2690 stored_args_map = sbitmap_alloc (args_size.constant);
2691 sbitmap_zero (stored_args_map);
2694 /* If we have no actual push instructions, or shouldn't use them,
2695 make space for all args right now. */
2696 else if (adjusted_args_size.var != 0)
2698 if (old_stack_level == 0)
2700 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2701 old_stack_pointer_delta = stack_pointer_delta;
2702 old_pending_adj = pending_stack_adjust;
2703 pending_stack_adjust = 0;
2704 /* stack_arg_under_construction says whether a stack arg is
2705 being constructed at the old stack level. Pushing the stack
2706 gets a clean outgoing argument block. */
2707 old_stack_arg_under_construction = stack_arg_under_construction;
2708 stack_arg_under_construction = 0;
2710 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2712 else
2714 /* Note that we must go through the motions of allocating an argument
2715 block even if the size is zero because we may be storing args
2716 in the area reserved for register arguments, which may be part of
2717 the stack frame. */
2719 int needed = adjusted_args_size.constant;
2721 /* Store the maximum argument space used. It will be pushed by
2722 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2723 checking). */
2725 if (needed > current_function_outgoing_args_size)
2726 current_function_outgoing_args_size = needed;
2728 if (must_preallocate)
2730 if (ACCUMULATE_OUTGOING_ARGS)
2732 /* Since the stack pointer will never be pushed, it is
2733 possible for the evaluation of a parm to clobber
2734 something we have already written to the stack.
2735 Since most function calls on RISC machines do not use
2736 the stack, this is uncommon, but must work correctly.
2738 Therefore, we save any area of the stack that was already
2739 written and that we are using. Here we set up to do this
2740 by making a new stack usage map from the old one. The
2741 actual save will be done by store_one_arg.
2743 Another approach might be to try to reorder the argument
2744 evaluations to avoid this conflicting stack usage. */
2746 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2747 /* Since we will be writing into the entire argument area,
2748 the map must be allocated for its entire size, not just
2749 the part that is the responsibility of the caller. */
2750 needed += reg_parm_stack_space;
2751 #endif
2753 #ifdef ARGS_GROW_DOWNWARD
2754 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2755 needed + 1);
2756 #else
2757 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2758 needed);
2759 #endif
2760 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2762 if (initial_highest_arg_in_use)
2763 memcpy (stack_usage_map, initial_stack_usage_map,
2764 initial_highest_arg_in_use);
2766 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2767 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2768 (highest_outgoing_arg_in_use
2769 - initial_highest_arg_in_use));
2770 needed = 0;
2772 /* The address of the outgoing argument list must not be
2773 copied to a register here, because argblock would be left
2774 pointing to the wrong place after the call to
2775 allocate_dynamic_stack_space below. */
2777 argblock = virtual_outgoing_args_rtx;
2779 else
2781 if (inhibit_defer_pop == 0)
2783 /* Try to reuse some or all of the pending_stack_adjust
2784 to get this space. */
2785 needed
2786 = (combine_pending_stack_adjustment_and_call
2787 (unadjusted_args_size,
2788 &adjusted_args_size,
2789 preferred_unit_stack_boundary));
2791 /* combine_pending_stack_adjustment_and_call computes
2792 an adjustment before the arguments are allocated.
2793 Account for them and see whether or not the stack
2794 needs to go up or down. */
2795 needed = unadjusted_args_size - needed;
2797 if (needed < 0)
2799 /* We're releasing stack space. */
2800 /* ??? We can avoid any adjustment at all if we're
2801 already aligned. FIXME. */
2802 pending_stack_adjust = -needed;
2803 do_pending_stack_adjust ();
2804 needed = 0;
2806 else
2807 /* We need to allocate space. We'll do that in
2808 push_block below. */
2809 pending_stack_adjust = 0;
2812 /* Special case this because overhead of `push_block' in
2813 this case is non-trivial. */
2814 if (needed == 0)
2815 argblock = virtual_outgoing_args_rtx;
2816 else
2818 argblock = push_block (GEN_INT (needed), 0, 0);
2819 #ifdef ARGS_GROW_DOWNWARD
2820 argblock = plus_constant (argblock, needed);
2821 #endif
2824 /* We only really need to call `copy_to_reg' in the case
2825 where push insns are going to be used to pass ARGBLOCK
2826 to a function call in ARGS. In that case, the stack
2827 pointer changes value from the allocation point to the
2828 call point, and hence the value of
2829 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2830 as well always do it. */
2831 argblock = copy_to_reg (argblock);
2836 if (ACCUMULATE_OUTGOING_ARGS)
2838 /* The save/restore code in store_one_arg handles all
2839 cases except one: a constructor call (including a C
2840 function returning a BLKmode struct) to initialize
2841 an argument. */
2842 if (stack_arg_under_construction)
2844 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2845 rtx push_size = GEN_INT (reg_parm_stack_space
2846 + adjusted_args_size.constant);
2847 #else
2848 rtx push_size = GEN_INT (adjusted_args_size.constant);
2849 #endif
2850 if (old_stack_level == 0)
2852 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2853 NULL_RTX);
2854 old_stack_pointer_delta = stack_pointer_delta;
2855 old_pending_adj = pending_stack_adjust;
2856 pending_stack_adjust = 0;
2857 /* stack_arg_under_construction says whether a stack
2858 arg is being constructed at the old stack level.
2859 Pushing the stack gets a clean outgoing argument
2860 block. */
2861 old_stack_arg_under_construction
2862 = stack_arg_under_construction;
2863 stack_arg_under_construction = 0;
2864 /* Make a new map for the new argument list. */
2865 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2866 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2867 highest_outgoing_arg_in_use = 0;
2869 allocate_dynamic_stack_space (push_size, NULL_RTX,
2870 BITS_PER_UNIT);
2873 /* If argument evaluation might modify the stack pointer,
2874 copy the address of the argument list to a register. */
2875 for (i = 0; i < num_actuals; i++)
2876 if (args[i].pass_on_stack)
2878 argblock = copy_addr_to_reg (argblock);
2879 break;
2883 compute_argument_addresses (args, argblock, num_actuals);
2885 /* If we push args individually in reverse order, perform stack alignment
2886 before the first push (the last arg). */
2887 if (PUSH_ARGS_REVERSED && argblock == 0
2888 && adjusted_args_size.constant != unadjusted_args_size)
2890 /* When the stack adjustment is pending, we get better code
2891 by combining the adjustments. */
2892 if (pending_stack_adjust
2893 && ! (flags & ECF_LIBCALL_BLOCK)
2894 && ! inhibit_defer_pop)
2896 pending_stack_adjust
2897 = (combine_pending_stack_adjustment_and_call
2898 (unadjusted_args_size,
2899 &adjusted_args_size,
2900 preferred_unit_stack_boundary));
2901 do_pending_stack_adjust ();
2903 else if (argblock == 0)
2904 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2905 - unadjusted_args_size));
2907 /* Now that the stack is properly aligned, pops can't safely
2908 be deferred during the evaluation of the arguments. */
2909 NO_DEFER_POP;
2911 funexp = rtx_for_function_call (fndecl, addr);
2913 /* Figure out the register where the value, if any, will come back. */
2914 valreg = 0;
2915 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2916 && ! structure_value_addr)
2918 if (pcc_struct_value)
2919 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2920 fndecl, (pass == 0));
2921 else
2922 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2925 /* Precompute all register parameters. It isn't safe to compute anything
2926 once we have started filling any specific hard regs. */
2927 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2929 #ifdef REG_PARM_STACK_SPACE
2930 /* Save the fixed argument area if it's part of the caller's frame and
2931 is clobbered by argument setup for this call. */
2932 if (ACCUMULATE_OUTGOING_ARGS && pass)
2933 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2934 &low_to_save, &high_to_save);
2935 #endif
2937 /* Now store (and compute if necessary) all non-register parms.
2938 These come before register parms, since they can require block-moves,
2939 which could clobber the registers used for register parms.
2940 Parms which have partial registers are not stored here,
2941 but we do preallocate space here if they want that. */
2943 for (i = 0; i < num_actuals; i++)
2944 if (args[i].reg == 0 || args[i].pass_on_stack)
2946 rtx before_arg = get_last_insn ();
2948 if (store_one_arg (&args[i], argblock, flags,
2949 adjusted_args_size.var != 0,
2950 reg_parm_stack_space)
2951 || (pass == 0
2952 && check_sibcall_argument_overlap (before_arg,
2953 &args[i], 1)))
2954 sibcall_failure = 1;
2957 /* If we have a parm that is passed in registers but not in memory
2958 and whose alignment does not permit a direct copy into registers,
2959 make a group of pseudos that correspond to each register that we
2960 will later fill. */
2961 if (STRICT_ALIGNMENT)
2962 store_unaligned_arguments_into_pseudos (args, num_actuals);
2964 /* Now store any partially-in-registers parm.
2965 This is the last place a block-move can happen. */
2966 if (reg_parm_seen)
2967 for (i = 0; i < num_actuals; i++)
2968 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2970 rtx before_arg = get_last_insn ();
2972 if (store_one_arg (&args[i], argblock, flags,
2973 adjusted_args_size.var != 0,
2974 reg_parm_stack_space)
2975 || (pass == 0
2976 && check_sibcall_argument_overlap (before_arg,
2977 &args[i], 1)))
2978 sibcall_failure = 1;
2981 /* If we pushed args in forward order, perform stack alignment
2982 after pushing the last arg. */
2983 if (!PUSH_ARGS_REVERSED && argblock == 0)
2984 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2985 - unadjusted_args_size));
2987 /* If register arguments require space on the stack and stack space
2988 was not preallocated, allocate stack space here for arguments
2989 passed in registers. */
2990 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2991 if (!ACCUMULATE_OUTGOING_ARGS
2992 && must_preallocate == 0 && reg_parm_stack_space > 0)
2993 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2994 #endif
2996 /* Pass the function the address in which to return a
2997 structure value. */
2998 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3000 #ifdef POINTERS_EXTEND_UNSIGNED
3001 if (GET_MODE (structure_value_addr) != Pmode)
3002 structure_value_addr = convert_memory_address
3003 (Pmode, structure_value_addr);
3004 #endif
3005 emit_move_insn (struct_value_rtx,
3006 force_reg (Pmode,
3007 force_operand (structure_value_addr,
3008 NULL_RTX)));
3010 if (GET_CODE (struct_value_rtx) == REG)
3011 use_reg (&call_fusage, struct_value_rtx);
3014 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3015 reg_parm_seen, pass == 0);
3017 load_register_parameters (args, num_actuals, &call_fusage, flags,
3018 pass == 0, &sibcall_failure);
3020 /* Perform postincrements before actually calling the function. */
3021 emit_queue ();
3023 /* Save a pointer to the last insn before the call, so that we can
3024 later safely search backwards to find the CALL_INSN. */
3025 before_call = get_last_insn ();
3027 /* Set up next argument register. For sibling calls on machines
3028 with register windows this should be the incoming register. */
3029 #ifdef FUNCTION_INCOMING_ARG
3030 if (pass == 0)
3031 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3032 void_type_node, 1);
3033 else
3034 #endif
3035 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3036 void_type_node, 1);
3038 /* All arguments and registers used for the call must be set up by
3039 now! */
3041 /* Stack must be properly aligned now. */
3042 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3043 abort ();
3045 /* Generate the actual call instruction. */
3046 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3047 adjusted_args_size.constant, struct_value_size,
3048 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3049 flags, & args_so_far);
3051 /* If call is cse'able, make appropriate pair of reg-notes around it.
3052 Test valreg so we don't crash; may safely ignore `const'
3053 if return type is void. Disable for PARALLEL return values, because
3054 we have no way to move such values into a pseudo register. */
3055 if (pass && (flags & ECF_LIBCALL_BLOCK))
3057 rtx insns;
3059 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3061 insns = get_insns ();
3062 end_sequence ();
3063 emit_insn (insns);
3065 else
3067 rtx note = 0;
3068 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3070 /* Mark the return value as a pointer if needed. */
3071 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3072 mark_reg_pointer (temp,
3073 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3075 /* Construct an "equal form" for the value which mentions all the
3076 arguments in order as well as the function name. */
3077 for (i = 0; i < num_actuals; i++)
3078 note = gen_rtx_EXPR_LIST (VOIDmode,
3079 args[i].initial_value, note);
3080 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3082 insns = get_insns ();
3083 end_sequence ();
3085 if (flags & ECF_PURE)
3086 note = gen_rtx_EXPR_LIST (VOIDmode,
3087 gen_rtx_USE (VOIDmode,
3088 gen_rtx_MEM (BLKmode,
3089 gen_rtx_SCRATCH (VOIDmode))),
3090 note);
3092 emit_libcall_block (insns, temp, valreg, note);
3094 valreg = temp;
3097 else if (pass && (flags & ECF_MALLOC))
3099 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3100 rtx last, insns;
3102 /* The return value from a malloc-like function is a pointer. */
3103 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3104 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3106 emit_move_insn (temp, valreg);
3108 /* The return value from a malloc-like function can not alias
3109 anything else. */
3110 last = get_last_insn ();
3111 REG_NOTES (last) =
3112 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3114 /* Write out the sequence. */
3115 insns = get_insns ();
3116 end_sequence ();
3117 emit_insn (insns);
3118 valreg = temp;
3121 /* For calls to `setjmp', etc., inform flow.c it should complain
3122 if nonvolatile values are live. For functions that cannot return,
3123 inform flow that control does not fall through. */
3125 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3127 /* The barrier must be emitted
3128 immediately after the CALL_INSN. Some ports emit more
3129 than just a CALL_INSN above, so we must search for it here. */
3131 rtx last = get_last_insn ();
3132 while (GET_CODE (last) != CALL_INSN)
3134 last = PREV_INSN (last);
3135 /* There was no CALL_INSN? */
3136 if (last == before_call)
3137 abort ();
3140 emit_barrier_after (last);
3142 /* Stack adjustments after a noreturn call are dead code. */
3143 stack_pointer_delta = old_stack_allocated;
3144 pending_stack_adjust = 0;
3147 if (flags & ECF_LONGJMP)
3148 current_function_calls_longjmp = 1;
3150 /* If value type not void, return an rtx for the value. */
3152 /* If there are cleanups to be called, don't use a hard reg as target.
3153 We need to double check this and see if it matters anymore. */
3154 if (any_pending_cleanups ())
3156 if (target && REG_P (target)
3157 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3158 target = 0;
3159 sibcall_failure = 1;
3162 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3163 || ignore)
3164 target = const0_rtx;
3165 else if (structure_value_addr)
3167 if (target == 0 || GET_CODE (target) != MEM)
3169 target
3170 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3171 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3172 structure_value_addr));
3173 set_mem_attributes (target, exp, 1);
3176 else if (pcc_struct_value)
3178 /* This is the special C++ case where we need to
3179 know what the true target was. We take care to
3180 never use this value more than once in one expression. */
3181 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3182 copy_to_reg (valreg));
3183 set_mem_attributes (target, exp, 1);
3185 /* Handle calls that return values in multiple non-contiguous locations.
3186 The Irix 6 ABI has examples of this. */
3187 else if (GET_CODE (valreg) == PARALLEL)
3189 if (target == 0)
3191 /* This will only be assigned once, so it can be readonly. */
3192 tree nt = build_qualified_type (TREE_TYPE (exp),
3193 (TYPE_QUALS (TREE_TYPE (exp))
3194 | TYPE_QUAL_CONST));
3196 target = assign_temp (nt, 0, 1, 1);
3197 preserve_temp_slots (target);
3200 if (! rtx_equal_p (target, valreg))
3201 emit_group_store (target, valreg, TREE_TYPE (exp),
3202 int_size_in_bytes (TREE_TYPE (exp)));
3204 /* We can not support sibling calls for this case. */
3205 sibcall_failure = 1;
3207 else if (target
3208 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3209 && GET_MODE (target) == GET_MODE (valreg))
3211 /* TARGET and VALREG cannot be equal at this point because the
3212 latter would not have REG_FUNCTION_VALUE_P true, while the
3213 former would if it were referring to the same register.
3215 If they refer to the same register, this move will be a no-op,
3216 except when function inlining is being done. */
3217 emit_move_insn (target, valreg);
3219 /* If we are setting a MEM, this code must be executed. Since it is
3220 emitted after the call insn, sibcall optimization cannot be
3221 performed in that case. */
3222 if (GET_CODE (target) == MEM)
3223 sibcall_failure = 1;
3225 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3227 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3229 /* We can not support sibling calls for this case. */
3230 sibcall_failure = 1;
3232 else
3233 target = copy_to_reg (valreg);
3235 #ifdef PROMOTE_FUNCTION_RETURN
3236 /* If we promoted this return value, make the proper SUBREG. TARGET
3237 might be const0_rtx here, so be careful. */
3238 if (GET_CODE (target) == REG
3239 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3240 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3242 tree type = TREE_TYPE (exp);
3243 int unsignedp = TREE_UNSIGNED (type);
3244 int offset = 0;
3246 /* If we don't promote as expected, something is wrong. */
3247 if (GET_MODE (target)
3248 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3249 abort ();
3251 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3252 && GET_MODE_SIZE (GET_MODE (target))
3253 > GET_MODE_SIZE (TYPE_MODE (type)))
3255 offset = GET_MODE_SIZE (GET_MODE (target))
3256 - GET_MODE_SIZE (TYPE_MODE (type));
3257 if (! BYTES_BIG_ENDIAN)
3258 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3259 else if (! WORDS_BIG_ENDIAN)
3260 offset %= UNITS_PER_WORD;
3262 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3263 SUBREG_PROMOTED_VAR_P (target) = 1;
3264 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3266 #endif
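/* A minimal illustrative sketch (not part of GCC) of the SUBREG byte-offset
   computation in the PROMOTE_FUNCTION_RETURN block above, with hypothetical
   sizes: an 8-byte promoted register holding a 4-byte value and a 4-byte
   word.  On big-endian targets the value lives in the high-order part of the
   register, so the SUBREG must start partway into it.  All names below are
   made up for the example.  */
#if 0
#include <assert.h>

static int
sketch_promoted_subreg_offset (int reg_size, int value_size, int units_per_word,
                               int bytes_big_endian, int words_big_endian)
{
  int offset = 0;

  if ((words_big_endian || bytes_big_endian) && reg_size > value_size)
    {
      offset = reg_size - value_size;
      if (!bytes_big_endian)
        offset = (offset / units_per_word) * units_per_word;
      else if (!words_big_endian)
        offset %= units_per_word;
    }
  return offset;
}

static void
sketch_promoted_subreg_example (void)
{
  /* Fully big-endian: skip the high 4 bytes of the 8-byte register.  */
  assert (sketch_promoted_subreg_offset (8, 4, 4, 1, 1) == 4);
  /* Big-endian bytes, little-endian words: the value is in word 0.  */
  assert (sketch_promoted_subreg_offset (8, 4, 4, 1, 0) == 0);
  /* Little-endian throughout: no offset at all.  */
  assert (sketch_promoted_subreg_offset (8, 4, 4, 0, 0) == 0);
}
#endif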
3268 /* If size of args is variable or this was a constructor call for a stack
3269 argument, restore saved stack-pointer value. */
3271 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3273 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3274 stack_pointer_delta = old_stack_pointer_delta;
3275 pending_stack_adjust = old_pending_adj;
3276 stack_arg_under_construction = old_stack_arg_under_construction;
3277 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3278 stack_usage_map = initial_stack_usage_map;
3279 sibcall_failure = 1;
3281 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3283 #ifdef REG_PARM_STACK_SPACE
3284 if (save_area)
3285 restore_fixed_argument_area (save_area, argblock,
3286 high_to_save, low_to_save);
3287 #endif
3289 /* If we saved any argument areas, restore them. */
3290 for (i = 0; i < num_actuals; i++)
3291 if (args[i].save_area)
3293 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3294 rtx stack_area
3295 = gen_rtx_MEM (save_mode,
3296 memory_address (save_mode,
3297 XEXP (args[i].stack_slot, 0)));
3299 if (save_mode != BLKmode)
3300 emit_move_insn (stack_area, args[i].save_area);
3301 else
3302 emit_block_move (stack_area, args[i].save_area,
3303 GEN_INT (args[i].locate.size.constant),
3304 BLOCK_OP_CALL_PARM);
3307 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3308 stack_usage_map = initial_stack_usage_map;
3311 /* If this was alloca, record the new stack level for nonlocal gotos.
3312 Check for the handler slots since we might not have a save area
3313 for non-local gotos. */
3315 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3316 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3318 /* Free up storage we no longer need. */
3319 for (i = 0; i < num_actuals; ++i)
3320 if (args[i].aligned_regs)
3321 free (args[i].aligned_regs);
3323 if (pass == 0)
3325 /* Undo the fake expand_start_target_temps we did earlier. If
3326 there had been any cleanups created, we've already set
3327 sibcall_failure. */
3328 expand_end_target_temps ();
3331 /* If this function is returning into a memory location marked as
3332 readonly, it means it is initializing that location. We normally treat
3333 functions as not clobbering such locations, so we need to specify that
3334 this one does. We do this by adding the appropriate CLOBBER to the
3335 CALL_INSN function usage list. This cannot be done by emitting a
3336 standalone CLOBBER after the call because the latter would be ignored
3337 by at least the delay slot scheduling pass. We do this now instead of
3338 adding to call_fusage before the call to emit_call_1 because TARGET
3339 may be modified in the meantime. */
3340 if (structure_value_addr != 0 && target != 0
3341 && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
3342 add_function_usage_to
3343 (last_call_insn (),
3344 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3345 NULL_RTX));
3347 insns = get_insns ();
3348 end_sequence ();
3350 if (pass == 0)
3352 tail_call_insns = insns;
3354 /* Restore the pending stack adjustment now that we have
3355 finished generating the sibling call sequence. */
3357 pending_stack_adjust = save_pending_stack_adjust;
3358 stack_pointer_delta = save_stack_pointer_delta;
3360 /* Prepare arg structure for next iteration. */
3361 for (i = 0; i < num_actuals; i++)
3363 args[i].value = 0;
3364 args[i].aligned_regs = 0;
3365 args[i].stack = 0;
3368 sbitmap_free (stored_args_map);
3370 else
3372 normal_call_insns = insns;
3374 /* Verify that we've deallocated all the stack we used. */
3375 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3376 && old_stack_allocated != stack_pointer_delta
3377 - pending_stack_adjust)
3378 abort ();
3381 /* If something prevents making this a sibling call,
3382 zero out the sequence. */
3383 if (sibcall_failure)
3384 tail_call_insns = NULL_RTX;
3387 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3388 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3389 can happen if the arguments to this function call an inline
3390 function whose expansion contains another CALL_PLACEHOLDER.
3392 If there are any C_Ps in any of these sequences, replace them
3393 with their normal call. */
3395 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3396 if (GET_CODE (insn) == CALL_INSN
3397 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3398 replace_call_placeholder (insn, sibcall_use_normal);
3400 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3401 if (GET_CODE (insn) == CALL_INSN
3402 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3403 replace_call_placeholder (insn, sibcall_use_normal);
3405 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3406 if (GET_CODE (insn) == CALL_INSN
3407 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3408 replace_call_placeholder (insn, sibcall_use_normal);
3410 /* If this was a potential tail recursion site, then emit a
3411 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3412 One of them will be selected later. */
3413 if (tail_recursion_insns || tail_call_insns)
3415 /* The tail recursion label must be kept around. We could expose
3416 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3417 and makes determining true tail recursion sites difficult.
3419 So we set LABEL_PRESERVE_P here, then clear it when we select
3420 one of the call sequences after rtl generation is complete. */
3421 if (tail_recursion_insns)
3422 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3423 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3424 tail_call_insns,
3425 tail_recursion_insns,
3426 tail_recursion_label));
3428 else
3429 emit_insn (normal_call_insns);
3431 currently_expanding_call--;
3433 /* If this function returns with the stack pointer depressed, ensure
3434 this block saves and restores the stack pointer, show it was
3435 changed, and adjust for any outgoing arg space. */
3436 if (flags & ECF_SP_DEPRESSED)
3438 clear_pending_stack_adjust ();
3439 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3440 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3441 save_stack_pointer ();
3444 return target;
3447 /* Traverse an argument list in VALUES and expand all complex
3448 arguments into their components. */
3449 tree
3450 split_complex_values (tree values)
3452 tree p;
3454 values = copy_list (values);
3456 for (p = values; p; p = TREE_CHAIN (p))
3458 tree complex_value = TREE_VALUE (p);
3459 tree complex_type;
3461 complex_type = TREE_TYPE (complex_value);
3462 if (!complex_type)
3463 continue;
3465 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3467 tree subtype;
3468 tree real, imag, next;
3470 subtype = TREE_TYPE (complex_type);
3471 complex_value = save_expr (complex_value);
3472 real = build1 (REALPART_EXPR, subtype, complex_value);
3473 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3475 TREE_VALUE (p) = real;
3476 next = TREE_CHAIN (p);
3477 imag = build_tree_list (NULL_TREE, imag);
3478 TREE_CHAIN (p) = imag;
3479 TREE_CHAIN (imag) = next;
3481 /* Skip the newly created node. */
3482 p = TREE_CHAIN (p);
3486 return values;
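/* Illustrative sketch (hypothetical argument list): if VALUES holds
   (a, c, b) and C has COMPLEX_TYPE with component type T, the copied
   list comes back as

     (a, REALPART_EXPR <c>, IMAGPART_EXPR <c>, b)

   where both new entries have type T and C has been wrapped in a
   SAVE_EXPR so it is evaluated only once.  */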
3489 /* Traverse a list of TYPES and expand all complex types into their
3490 components. */
3491 tree
3492 split_complex_types (tree types)
3494 tree p;
3496 types = copy_list (types);
3498 for (p = types; p; p = TREE_CHAIN (p))
3500 tree complex_type = TREE_VALUE (p);
3502 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3504 tree next, imag;
3506 /* Rewrite complex type with component type. */
3507 TREE_VALUE (p) = TREE_TYPE (complex_type);
3508 next = TREE_CHAIN (p);
3510 /* Add another component type for the imaginary part. */
3511 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3512 TREE_CHAIN (p) = imag;
3513 TREE_CHAIN (imag) = next;
3515 /* Skip the newly created node. */
3516 p = TREE_CHAIN (p);
3520 return types;
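/* Illustrative sketch (hypothetical type list): a chain of
   (int, complex double, char) is rewritten on the copy to
   (int, double, double, char); each COMPLEX_TYPE entry becomes two
   entries of its component type.  */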
3523 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3524 The RETVAL parameter specifies whether the return value needs to be saved;
3525 the other parameters are documented in the emit_library_call function below. */
3527 static rtx
3528 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3529 enum libcall_type fn_type,
3530 enum machine_mode outmode, int nargs, va_list p)
3532 /* Total size in bytes of all the stack-parms scanned so far. */
3533 struct args_size args_size;
3534 /* Size of arguments before any adjustments (such as rounding). */
3535 struct args_size original_args_size;
3536 int argnum;
3537 rtx fun;
3538 int inc;
3539 int count;
3540 rtx argblock = 0;
3541 CUMULATIVE_ARGS args_so_far;
3542 struct arg
3544 rtx value;
3545 enum machine_mode mode;
3546 rtx reg;
3547 int partial;
3548 struct locate_and_pad_arg_data locate;
3549 rtx save_area;
3551 struct arg *argvec;
3552 int old_inhibit_defer_pop = inhibit_defer_pop;
3553 rtx call_fusage = 0;
3554 rtx mem_value = 0;
3555 rtx valreg;
3556 int pcc_struct_value = 0;
3557 int struct_value_size = 0;
3558 int flags;
3559 int reg_parm_stack_space = 0;
3560 int needed;
3561 rtx before_call;
3562 tree tfom; /* type_for_mode (outmode, 0) */
3564 #ifdef REG_PARM_STACK_SPACE
3565 /* Define the boundary of the register parm stack space that needs to be
3566 saved, if any. */
3567 int low_to_save, high_to_save;
3568 rtx save_area = 0; /* Place that it is saved. */
3569 #endif
3571 /* Size of the stack reserved for parameter registers. */
3572 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3573 char *initial_stack_usage_map = stack_usage_map;
3575 #ifdef REG_PARM_STACK_SPACE
3576 #ifdef MAYBE_REG_PARM_STACK_SPACE
3577 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3578 #else
3579 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3580 #endif
3581 #endif
3583 /* By default, library functions can not throw. */
3584 flags = ECF_NOTHROW;
3586 switch (fn_type)
3588 case LCT_NORMAL:
3589 break;
3590 case LCT_CONST:
3591 flags |= ECF_CONST;
3592 break;
3593 case LCT_PURE:
3594 flags |= ECF_PURE;
3595 break;
3596 case LCT_CONST_MAKE_BLOCK:
3597 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3598 break;
3599 case LCT_PURE_MAKE_BLOCK:
3600 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3601 break;
3602 case LCT_NORETURN:
3603 flags |= ECF_NORETURN;
3604 break;
3605 case LCT_THROW:
3606 flags = ECF_NORETURN;
3607 break;
3608 case LCT_ALWAYS_RETURN:
3609 flags = ECF_ALWAYS_RETURN;
3610 break;
3611 case LCT_RETURNS_TWICE:
3612 flags = ECF_RETURNS_TWICE;
3613 break;
3615 fun = orgfun;
3617 /* Ensure current function's preferred stack boundary is at least
3618 what we need. */
3619 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3620 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3622 /* If this kind of value comes back in memory,
3623 decide where in memory it should come back. */
3624 if (outmode != VOIDmode)
3626 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3627 if (aggregate_value_p (tfom))
3629 #ifdef PCC_STATIC_STRUCT_RETURN
3630 rtx pointer_reg
3631 = hard_function_value (build_pointer_type (tfom), 0, 0);
3632 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3633 pcc_struct_value = 1;
3634 if (value == 0)
3635 value = gen_reg_rtx (outmode);
3636 #else /* not PCC_STATIC_STRUCT_RETURN */
3637 struct_value_size = GET_MODE_SIZE (outmode);
3638 if (value != 0 && GET_CODE (value) == MEM)
3639 mem_value = value;
3640 else
3641 mem_value = assign_temp (tfom, 0, 1, 1);
3642 #endif
3643 /* This call returns a big structure. */
3644 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3647 else
3648 tfom = void_type_node;
3650 /* ??? Unfinished: must pass the memory address as an argument. */
3652 /* Copy all the libcall-arguments out of the varargs data
3653 and into a vector ARGVEC.
3655 Compute how to pass each argument. We only support a very small subset
3656 of the full argument passing conventions to limit complexity here since
3657 library functions shouldn't have many args. */
3659 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3660 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3662 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3663 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3664 #else
3665 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3666 #endif
3668 args_size.constant = 0;
3669 args_size.var = 0;
3671 count = 0;
3673 /* Now we are about to start emitting insns that can be deleted
3674 if a libcall is deleted. */
3675 if (flags & ECF_LIBCALL_BLOCK)
3676 start_sequence ();
3678 push_temp_slots ();
3680 /* If there's a structure value address to be passed,
3681 either pass it in the special place, or pass it as an extra argument. */
3682 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3684 rtx addr = XEXP (mem_value, 0);
3685 nargs++;
3687 /* Make sure it is a reasonable operand for a move or push insn. */
3688 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3689 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3690 addr = force_operand (addr, NULL_RTX);
3692 argvec[count].value = addr;
3693 argvec[count].mode = Pmode;
3694 argvec[count].partial = 0;
3696 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3697 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3698 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3699 abort ();
3700 #endif
3702 locate_and_pad_parm (Pmode, NULL_TREE,
3703 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3705 #else
3706 argvec[count].reg != 0,
3707 #endif
3708 0, NULL_TREE, &args_size, &argvec[count].locate);
3710 if (argvec[count].reg == 0 || argvec[count].partial != 0
3711 || reg_parm_stack_space > 0)
3712 args_size.constant += argvec[count].locate.size.constant;
3714 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3716 count++;
3719 for (; count < nargs; count++)
3721 rtx val = va_arg (p, rtx);
3722 enum machine_mode mode = va_arg (p, enum machine_mode);
3724 /* We cannot convert the arg value to the mode the library wants here;
3725 must do it earlier where we know the signedness of the arg. */
3726 if (mode == BLKmode
3727 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3728 abort ();
3730 /* On some machines, there's no way to pass a float to a library fcn.
3731 Pass it as a double instead. */
3732 #ifdef LIBGCC_NEEDS_DOUBLE
3733 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3734 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3735 #endif
3737 /* There's no need to call protect_from_queue, because
3738 either emit_move_insn or emit_push_insn will do that. */
3740 /* Make sure it is a reasonable operand for a move or push insn. */
3741 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3742 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3743 val = force_operand (val, NULL_RTX);
3745 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3746 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3748 rtx slot;
3749 int must_copy = 1
3750 #ifdef FUNCTION_ARG_CALLEE_COPIES
3751 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3752 NULL_TREE, 1)
3753 #endif
3756 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3757 functions, so we have to pretend this isn't such a function. */
3758 if (flags & ECF_LIBCALL_BLOCK)
3760 rtx insns = get_insns ();
3761 end_sequence ();
3762 emit_insn (insns);
3764 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3766 /* If this was a CONST function, it is now PURE since
3767 it now reads memory. */
3768 if (flags & ECF_CONST)
3770 flags &= ~ECF_CONST;
3771 flags |= ECF_PURE;
3774 if (GET_MODE (val) == MEM && ! must_copy)
3775 slot = val;
3776 else if (must_copy)
3778 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3779 0, 1, 1);
3780 emit_move_insn (slot, val);
3782 else
3784 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3786 slot
3787 = gen_rtx_MEM (mode,
3788 expand_expr (build1 (ADDR_EXPR,
3789 build_pointer_type (type),
3790 make_tree (type, val)),
3791 NULL_RTX, VOIDmode, 0));
3794 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3795 gen_rtx_USE (VOIDmode, slot),
3796 call_fusage);
3797 if (must_copy)
3798 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3799 gen_rtx_CLOBBER (VOIDmode,
3800 slot),
3801 call_fusage);
3803 mode = Pmode;
3804 val = force_operand (XEXP (slot, 0), NULL_RTX);
3806 #endif
3808 argvec[count].value = val;
3809 argvec[count].mode = mode;
3811 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3813 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3814 argvec[count].partial
3815 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3816 #else
3817 argvec[count].partial = 0;
3818 #endif
3820 locate_and_pad_parm (mode, NULL_TREE,
3821 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3823 #else
3824 argvec[count].reg != 0,
3825 #endif
3826 argvec[count].partial,
3827 NULL_TREE, &args_size, &argvec[count].locate);
3829 if (argvec[count].locate.size.var)
3830 abort ();
3832 if (argvec[count].reg == 0 || argvec[count].partial != 0
3833 || reg_parm_stack_space > 0)
3834 args_size.constant += argvec[count].locate.size.constant;
3836 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3839 #ifdef FINAL_REG_PARM_STACK_SPACE
3840 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3841 args_size.var);
3842 #endif
3843 /* If this machine requires an external definition for library
3844 functions, write one out. */
3845 assemble_external_libcall (fun);
3847 original_args_size = args_size;
3848 args_size.constant = (((args_size.constant
3849 + stack_pointer_delta
3850 + STACK_BYTES - 1)
3851 / STACK_BYTES
3852 * STACK_BYTES)
3853 - stack_pointer_delta);
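/* Worked example with hypothetical numbers: if STACK_BYTES is 16,
   stack_pointer_delta is 4 and 20 bytes of arguments have been counted,
   the expression above gives (20 + 4 + 15) / 16 * 16 - 4 == 28, so the
   pending delta plus the argument block stays 16-byte aligned.  */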
3855 args_size.constant = MAX (args_size.constant,
3856 reg_parm_stack_space);
3858 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3859 args_size.constant -= reg_parm_stack_space;
3860 #endif
3862 if (args_size.constant > current_function_outgoing_args_size)
3863 current_function_outgoing_args_size = args_size.constant;
3865 if (ACCUMULATE_OUTGOING_ARGS)
3867 /* Since the stack pointer will never be pushed, it is possible for
3868 the evaluation of a parm to clobber something we have already
3869 written to the stack. Since most function calls on RISC machines
3870 do not use the stack, this is uncommon, but must work correctly.
3872 Therefore, we save any area of the stack that was already written
3873 and that we are using. Here we set up to do this by making a new
3874 stack usage map from the old one.
3876 Another approach might be to try to reorder the argument
3877 evaluations to avoid this conflicting stack usage. */
3879 needed = args_size.constant;
3881 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3882 /* Since we will be writing into the entire argument area, the
3883 map must be allocated for its entire size, not just the part that
3884 is the responsibility of the caller. */
3885 needed += reg_parm_stack_space;
3886 #endif
3888 #ifdef ARGS_GROW_DOWNWARD
3889 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3890 needed + 1);
3891 #else
3892 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3893 needed);
3894 #endif
3895 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3897 if (initial_highest_arg_in_use)
3898 memcpy (stack_usage_map, initial_stack_usage_map,
3899 initial_highest_arg_in_use);
3901 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3902 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3903 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3904 needed = 0;
3906 /* We must be careful to use virtual regs before they're instantiated,
3907 and real regs afterwards. Loop optimization, for example, can create
3908 new libcalls after we've instantiated the virtual regs, and if we
3909 use virtuals anyway, they won't match the rtl patterns. */
3911 if (virtuals_instantiated)
3912 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3913 else
3914 argblock = virtual_outgoing_args_rtx;
3916 else
3918 if (!PUSH_ARGS)
3919 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3922 /* If we push args individually in reverse order, perform stack alignment
3923 before the first push (the last arg). */
3924 if (argblock == 0 && PUSH_ARGS_REVERSED)
3925 anti_adjust_stack (GEN_INT (args_size.constant
3926 - original_args_size.constant));
3928 if (PUSH_ARGS_REVERSED)
3930 inc = -1;
3931 argnum = nargs - 1;
3933 else
3935 inc = 1;
3936 argnum = 0;
3939 #ifdef REG_PARM_STACK_SPACE
3940 if (ACCUMULATE_OUTGOING_ARGS)
3942 /* The argument list is the property of the called routine and it
3943 may clobber it. If the fixed area has been used for previous
3944 parameters, we must save and restore it. */
3945 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3946 &low_to_save, &high_to_save);
3948 #endif
3950 /* Push the args that need to be pushed. */
3952 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3953 are to be pushed. */
3954 for (count = 0; count < nargs; count++, argnum += inc)
3956 enum machine_mode mode = argvec[argnum].mode;
3957 rtx val = argvec[argnum].value;
3958 rtx reg = argvec[argnum].reg;
3959 int partial = argvec[argnum].partial;
3960 int lower_bound = 0, upper_bound = 0, i;
3962 if (! (reg != 0 && partial == 0))
3964 if (ACCUMULATE_OUTGOING_ARGS)
3966 /* If this is being stored into a pre-allocated, fixed-size,
3967 stack area, save any previous data at that location. */
3969 #ifdef ARGS_GROW_DOWNWARD
3970 /* stack_slot is negative, but we want to index stack_usage_map
3971 with positive values. */
3972 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3973 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3974 #else
3975 lower_bound = argvec[argnum].locate.offset.constant;
3976 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3977 #endif
3979 i = lower_bound;
3980 /* Don't worry about things in the fixed argument area;
3981 it has already been saved. */
3982 if (i < reg_parm_stack_space)
3983 i = reg_parm_stack_space;
3984 while (i < upper_bound && stack_usage_map[i] == 0)
3985 i++;
3987 if (i < upper_bound)
3989 /* We need to make a save area. */
3990 unsigned int size
3991 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3992 enum machine_mode save_mode
3993 = mode_for_size (size, MODE_INT, 1);
3994 rtx adr
3995 = plus_constant (argblock,
3996 argvec[argnum].locate.offset.constant);
3997 rtx stack_area
3998 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3999 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4001 emit_move_insn (argvec[argnum].save_area, stack_area);
4005 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4006 partial, reg, 0, argblock,
4007 GEN_INT (argvec[argnum].locate.offset.constant),
4008 reg_parm_stack_space,
4009 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4011 /* Now mark the segment we just used. */
4012 if (ACCUMULATE_OUTGOING_ARGS)
4013 for (i = lower_bound; i < upper_bound; i++)
4014 stack_usage_map[i] = 1;
4016 NO_DEFER_POP;
4020 /* If we pushed args in forward order, perform stack alignment
4021 after pushing the last arg. */
4022 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4023 anti_adjust_stack (GEN_INT (args_size.constant
4024 - original_args_size.constant));
4026 if (PUSH_ARGS_REVERSED)
4027 argnum = nargs - 1;
4028 else
4029 argnum = 0;
4031 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4033 /* Now load any reg parms into their regs. */
4035 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4036 are to be pushed. */
4037 for (count = 0; count < nargs; count++, argnum += inc)
4039 rtx val = argvec[argnum].value;
4040 rtx reg = argvec[argnum].reg;
4041 int partial = argvec[argnum].partial;
4043 /* Handle calls that pass values in multiple non-contiguous
4044 locations. The PA64 has examples of this for library calls. */
4045 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4046 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
4047 else if (reg != 0 && partial == 0)
4048 emit_move_insn (reg, val);
4050 NO_DEFER_POP;
4053 /* Any regs containing parms remain in use through the call. */
4054 for (count = 0; count < nargs; count++)
4056 rtx reg = argvec[count].reg;
4057 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4058 use_group_regs (&call_fusage, reg);
4059 else if (reg != 0)
4060 use_reg (&call_fusage, reg);
4063 /* Pass the function the address in which to return a structure value. */
4064 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4066 emit_move_insn (struct_value_rtx,
4067 force_reg (Pmode,
4068 force_operand (XEXP (mem_value, 0),
4069 NULL_RTX)));
4070 if (GET_CODE (struct_value_rtx) == REG)
4071 use_reg (&call_fusage, struct_value_rtx);
4074 /* Don't allow popping to be deferred, since then
4075 cse'ing of library calls could delete a call and leave the pop. */
4076 NO_DEFER_POP;
4077 valreg = (mem_value == 0 && outmode != VOIDmode
4078 ? hard_libcall_value (outmode) : NULL_RTX);
4080 /* Stack must be properly aligned now. */
4081 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4082 abort ();
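/* For example, with a PREFERRED_STACK_BOUNDARY of 128 bits the mask is
   16 - 1, so any stack_pointer_delta that is not a multiple of 16 bytes
   aborts here.  */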
4084 before_call = get_last_insn ();
4086 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4087 will set inhibit_defer_pop to that value. */
4088 /* The return type is needed to decide how many bytes the function pops.
4089 Signedness plays no role in that, so for simplicity, we pretend it's
4090 always signed. We also assume that the list of arguments passed has
4091 no impact, so we pretend it is unknown. */
4093 emit_call_1 (fun,
4094 get_identifier (XSTR (orgfun, 0)),
4095 build_function_type (tfom, NULL_TREE),
4096 original_args_size.constant, args_size.constant,
4097 struct_value_size,
4098 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4099 valreg,
4100 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4102 /* For calls to `setjmp', etc., inform flow.c it should complain
4103 if nonvolatile values are live. For functions that cannot return,
4104 inform flow that control does not fall through. */
4106 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4108 /* The barrier note must be emitted
4109 immediately after the CALL_INSN. Some ports emit more than
4110 just a CALL_INSN above, so we must search for it here. */
4112 rtx last = get_last_insn ();
4113 while (GET_CODE (last) != CALL_INSN)
4115 last = PREV_INSN (last);
4116 /* There was no CALL_INSN? */
4117 if (last == before_call)
4118 abort ();
4121 emit_barrier_after (last);
4124 /* Now restore inhibit_defer_pop to its actual original value. */
4125 OK_DEFER_POP;
4127 /* If call is cse'able, make appropriate pair of reg-notes around it.
4128 Test valreg so we don't crash; may safely ignore `const'
4129 if return type is void. Disable for PARALLEL return values, because
4130 we have no way to move such values into a pseudo register. */
4131 if (flags & ECF_LIBCALL_BLOCK)
4133 rtx insns;
4135 if (valreg == 0)
4137 insns = get_insns ();
4138 end_sequence ();
4139 emit_insn (insns);
4141 else
4143 rtx note = 0;
4144 rtx temp;
4145 int i;
4147 if (GET_CODE (valreg) == PARALLEL)
4149 temp = gen_reg_rtx (outmode);
4150 emit_group_store (temp, valreg, NULL_TREE, outmode);
4151 valreg = temp;
4154 temp = gen_reg_rtx (GET_MODE (valreg));
4156 /* Construct an "equal form" for the value which mentions all the
4157 arguments in order as well as the function name. */
4158 for (i = 0; i < nargs; i++)
4159 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4160 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4162 insns = get_insns ();
4163 end_sequence ();
4165 if (flags & ECF_PURE)
4166 note = gen_rtx_EXPR_LIST (VOIDmode,
4167 gen_rtx_USE (VOIDmode,
4168 gen_rtx_MEM (BLKmode,
4169 gen_rtx_SCRATCH (VOIDmode))),
4170 note);
4172 emit_libcall_block (insns, temp, valreg, note);
4174 valreg = temp;
4177 pop_temp_slots ();
4179 /* Copy the value to the right place. */
4180 if (outmode != VOIDmode && retval)
4182 if (mem_value)
4184 if (value == 0)
4185 value = mem_value;
4186 if (value != mem_value)
4187 emit_move_insn (value, mem_value);
4189 else if (GET_CODE (valreg) == PARALLEL)
4191 if (value == 0)
4192 value = gen_reg_rtx (outmode);
4193 emit_group_store (value, valreg, NULL_TREE, outmode);
4195 else if (value != 0)
4196 emit_move_insn (value, valreg);
4197 else
4198 value = valreg;
4201 if (ACCUMULATE_OUTGOING_ARGS)
4203 #ifdef REG_PARM_STACK_SPACE
4204 if (save_area)
4205 restore_fixed_argument_area (save_area, argblock,
4206 high_to_save, low_to_save);
4207 #endif
4209 /* If we saved any argument areas, restore them. */
4210 for (count = 0; count < nargs; count++)
4211 if (argvec[count].save_area)
4213 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4214 rtx adr = plus_constant (argblock,
4215 argvec[count].locate.offset.constant);
4216 rtx stack_area = gen_rtx_MEM (save_mode,
4217 memory_address (save_mode, adr));
4219 emit_move_insn (stack_area, argvec[count].save_area);
4222 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4223 stack_usage_map = initial_stack_usage_map;
4226 return value;
4230 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4232 for a value of mode OUTMODE,
4233 with NARGS different arguments, passed as alternating rtx values
4234 and machine_modes to convert them to.
4235 The rtx values should have been passed through protect_from_queue already.
4237 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4238 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4239 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4240 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4241 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4242 or other LCT_ value for other types of library calls. */
4244 void
4245 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4246 enum machine_mode outmode, int nargs, ...)
4248 va_list p;
4250 va_start (p, nargs);
4251 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4252 va_end (p);
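/* A hedged usage sketch (hypothetical libcall and operands) of the
   alternating value/mode convention described above:

     emit_library_call (fun, LCT_NORMAL, VOIDmode, 2,
                        op0, SImode, op1, SImode);

   where FUN is the SYMBOL_REF for the routine and OP0/OP1 are rtx values
   already in SImode (or VOIDmode constants).  */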
4255 /* Like emit_library_call except that an extra argument, VALUE,
4256 comes second and says where to store the result.
4257 (If VALUE is zero, this function chooses a convenient way
4258 to return the value.)
4260 This function returns an rtx for where the value is to be found.
4261 If VALUE is nonzero, VALUE is returned. */
4263 rtx
4264 emit_library_call_value (rtx orgfun, rtx value,
4265 enum libcall_type fn_type,
4266 enum machine_mode outmode, int nargs, ...)
4268 rtx result;
4269 va_list p;
4271 va_start (p, nargs);
4272 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4273 nargs, p);
4274 va_end (p);
4276 return result;
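/* A hedged usage sketch (hypothetical operands): request an SImode result
   and let the function choose where it lives:

     rtx res = emit_library_call_value (fun, NULL_RTX, LCT_CONST, SImode,
                                        2, op0, SImode, op1, SImode);

   Passing a nonzero VALUE instead forces the result into that rtx, which
   is then returned.  */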
4279 /* Store a single argument for a function call
4280 into the register or memory area where it must be passed.
4281 *ARG describes the argument value and where to pass it.
4283 ARGBLOCK is the address of the stack-block for all the arguments,
4284 or 0 on a machine where arguments are pushed individually.
4286 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4287 so must be careful about how the stack is used.
4289 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4290 argument stack. This is used, if ACCUMULATE_OUTGOING_ARGS, to indicate
4291 that we need not worry about saving and restoring the stack.
4293 FNDECL is the declaration of the function we are calling.
4295 Return nonzero if this arg should cause sibcall failure,
4296 zero otherwise. */
4298 static int
4299 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4300 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4302 tree pval = arg->tree_value;
4303 rtx reg = 0;
4304 int partial = 0;
4305 int used = 0;
4306 int i, lower_bound = 0, upper_bound = 0;
4307 int sibcall_failure = 0;
4309 if (TREE_CODE (pval) == ERROR_MARK)
4310 return 1;
4312 /* Push a new temporary level for any temporaries we make for
4313 this argument. */
4314 push_temp_slots ();
4316 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4318 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4319 save any previous data at that location. */
4320 if (argblock && ! variable_size && arg->stack)
4322 #ifdef ARGS_GROW_DOWNWARD
4323 /* stack_slot is negative, but we want to index stack_usage_map
4324 with positive values. */
4325 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4326 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4327 else
4328 upper_bound = 0;
4330 lower_bound = upper_bound - arg->locate.size.constant;
4331 #else
4332 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4333 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4334 else
4335 lower_bound = 0;
4337 upper_bound = lower_bound + arg->locate.size.constant;
4338 #endif
4340 i = lower_bound;
4341 /* Don't worry about things in the fixed argument area;
4342 it has already been saved. */
4343 if (i < reg_parm_stack_space)
4344 i = reg_parm_stack_space;
4345 while (i < upper_bound && stack_usage_map[i] == 0)
4346 i++;
4348 if (i < upper_bound)
4350 /* We need to make a save area. */
4351 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4352 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4353 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4354 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4356 if (save_mode == BLKmode)
4358 tree ot = TREE_TYPE (arg->tree_value);
4359 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4360 | TYPE_QUAL_CONST));
4362 arg->save_area = assign_temp (nt, 0, 1, 1);
4363 preserve_temp_slots (arg->save_area);
4364 emit_block_move (validize_mem (arg->save_area), stack_area,
4365 expr_size (arg->tree_value),
4366 BLOCK_OP_CALL_PARM);
4368 else
4370 arg->save_area = gen_reg_rtx (save_mode);
4371 emit_move_insn (arg->save_area, stack_area);
4377 /* If this isn't going to be placed on both the stack and in registers,
4378 set up the register and number of words. */
4379 if (! arg->pass_on_stack)
4381 if (flags & ECF_SIBCALL)
4382 reg = arg->tail_call_reg;
4383 else
4384 reg = arg->reg;
4385 partial = arg->partial;
4388 if (reg != 0 && partial == 0)
4389 /* Being passed entirely in a register. We shouldn't be called in
4390 this case. */
4391 abort ();
4393 /* If this arg needs special alignment, don't load the registers
4394 here. */
4395 if (arg->n_aligned_regs != 0)
4396 reg = 0;
4398 /* If this is being passed partially in a register, we can't evaluate
4399 it directly into its stack slot. Otherwise, we can. */
4400 if (arg->value == 0)
4402 /* stack_arg_under_construction is nonzero if a function argument is
4403 being evaluated directly into the outgoing argument list and
4404 expand_call must take special action to preserve the argument list
4405 if it is called recursively.
4407 For scalar function arguments stack_usage_map is sufficient to
4408 determine which stack slots must be saved and restored. Scalar
4409 arguments in general have pass_on_stack == 0.
4411 If this argument is initialized by a function which takes the
4412 address of the argument (a C++ constructor or a C function
4413 returning a BLKmode structure), then stack_usage_map is
4414 insufficient and expand_call must push the stack around the
4415 function call. Such arguments have pass_on_stack == 1.
4417 Note that it is always safe to set stack_arg_under_construction,
4418 but this generates suboptimal code if set when not needed. */
4420 if (arg->pass_on_stack)
4421 stack_arg_under_construction++;
4423 arg->value = expand_expr (pval,
4424 (partial
4425 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4426 ? NULL_RTX : arg->stack,
4427 VOIDmode, EXPAND_STACK_PARM);
4429 /* If we are promoting the object (or if for any other reason the mode
4430 doesn't agree), convert the mode. */
4432 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4433 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4434 arg->value, arg->unsignedp);
4436 if (arg->pass_on_stack)
4437 stack_arg_under_construction--;
4440 /* Don't allow anything left on stack from computation
4441 of argument to alloca. */
4442 if (flags & ECF_MAY_BE_ALLOCA)
4443 do_pending_stack_adjust ();
4445 if (arg->value == arg->stack)
4446 /* If the value is already in the stack slot, we are done. */
4448 else if (arg->mode != BLKmode)
4450 int size;
4452 /* Argument is a scalar, not entirely passed in registers.
4453 (If part is passed in registers, arg->partial says how much
4454 and emit_push_insn will take care of putting it there.)
4456 Push it, and if its size is less than the
4457 amount of space allocated to it,
4458 also bump stack pointer by the additional space.
4459 Note that in C the default argument promotions
4460 will prevent such mismatches. */
4462 size = GET_MODE_SIZE (arg->mode);
4463 /* Compute how much space the push instruction will push.
4464 On many machines, pushing a byte will advance the stack
4465 pointer by a halfword. */
4466 #ifdef PUSH_ROUNDING
4467 size = PUSH_ROUNDING (size);
4468 #endif
4469 used = size;
4471 /* Compute how much space the argument should get:
4472 round up to a multiple of the alignment for arguments. */
4473 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4474 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4475 / (PARM_BOUNDARY / BITS_PER_UNIT))
4476 * (PARM_BOUNDARY / BITS_PER_UNIT));
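/* For example (hypothetical numbers), with a 32-bit PARM_BOUNDARY a
   1-byte push is rounded up here so that USED becomes 4.  */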
4478 /* This isn't already where we want it on the stack, so put it there.
4479 This can either be done with push or copy insns. */
4480 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4481 PARM_BOUNDARY, partial, reg, used - size, argblock,
4482 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4483 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4485 /* Unless this is a partially-in-register argument, the argument is now
4486 in the stack. */
4487 if (partial == 0)
4488 arg->value = arg->stack;
4490 else
4492 /* BLKmode, at least partly to be pushed. */
4494 unsigned int parm_align;
4495 int excess;
4496 rtx size_rtx;
4498 /* Pushing a nonscalar.
4499 If part is passed in registers, PARTIAL says how much
4500 and emit_push_insn will take care of putting it there. */
4502 /* Round its size up to a multiple
4503 of the allocation unit for arguments. */
4505 if (arg->locate.size.var != 0)
4507 excess = 0;
4508 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4510 else
4512 /* PUSH_ROUNDING has no effect on us, because
4513 emit_push_insn for BLKmode is careful to avoid it. */
4514 excess = (arg->locate.size.constant
4515 - int_size_in_bytes (TREE_TYPE (pval))
4516 + partial * UNITS_PER_WORD);
4517 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4518 NULL_RTX, TYPE_MODE (sizetype), 0);
4521 /* Some types will require stricter alignment, which will be
4522 provided for elsewhere in argument layout. */
4523 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4525 /* When an argument is padded down, the block is aligned to
4526 PARM_BOUNDARY, but the actual argument isn't. */
4527 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4529 if (arg->locate.size.var)
4530 parm_align = BITS_PER_UNIT;
4531 else if (excess)
4533 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4534 parm_align = MIN (parm_align, excess_align);
4538 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4540 /* emit_push_insn might not work properly if arg->value and
4541 argblock + arg->locate.offset areas overlap. */
4542 rtx x = arg->value;
4543 int i = 0;
4545 if (XEXP (x, 0) == current_function_internal_arg_pointer
4546 || (GET_CODE (XEXP (x, 0)) == PLUS
4547 && XEXP (XEXP (x, 0), 0) ==
4548 current_function_internal_arg_pointer
4549 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4551 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4552 i = INTVAL (XEXP (XEXP (x, 0), 1));
4554 /* expand_call should ensure this */
4555 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4556 abort ();
4558 if (arg->locate.offset.constant > i)
4560 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4561 sibcall_failure = 1;
4563 else if (arg->locate.offset.constant < i)
4565 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4566 sibcall_failure = 1;
4571 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4572 parm_align, partial, reg, excess, argblock,
4573 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4574 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4576 /* Unless this is a partially-in-register argument, the argument is now
4577 in the stack.
4579 ??? Unlike the case above, in which we want the actual
4580 address of the data, so that we can load it directly into a
4581 register, here we want the address of the stack slot, so that
4582 it's properly aligned for word-by-word copying or something
4583 like that. It's not clear that this is always correct. */
4584 if (partial == 0)
4585 arg->value = arg->stack_slot;
4588 /* Mark all slots this store used. */
4589 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4590 && argblock && ! variable_size && arg->stack)
4591 for (i = lower_bound; i < upper_bound; i++)
4592 stack_usage_map[i] = 1;
4594 /* Once we have pushed something, pops can't safely
4595 be deferred during the rest of the arguments. */
4596 NO_DEFER_POP;
4598 /* ANSI doesn't require a sequence point here,
4599 but PCC has one, so this will avoid some problems. */
4600 emit_queue ();
4602 /* Free any temporary slots made in processing this argument. Show
4603 that we might have taken the address of something and pushed that
4604 as an operand. */
4605 preserve_temp_slots (NULL_RTX);
4606 free_temp_slots ();
4607 pop_temp_slots ();
4609 return sibcall_failure;
4612 /* Nonzero if we do not know how to pass TYPE solely in registers.
4613 We cannot do so in the following cases:
4615 - if the type has variable size
4616 - if the type is marked as addressable (it is required to be constructed
4617 into the stack)
4618 - if the padding and mode of the type is such that a copy into a register
4619 would put it into the wrong part of the register.
4621 Which padding can't be supported depends on the byte endianness.
4623 A value in a register is implicitly padded at the most significant end.
4624 On a big-endian machine, that is the lower end in memory.
4625 So a value padded in memory at the upper end can't go in a register.
4626 For a little-endian machine, the reverse is true. */
4628 bool
4629 default_must_pass_in_stack (enum machine_mode mode, tree type)
4631 if (!type)
4632 return false;
4634 /* If the type has variable size... */
4635 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4636 return true;
4638 /* If the type is marked as addressable (it is required
4639 to be constructed into the stack)... */
4640 if (TREE_ADDRESSABLE (type))
4641 return true;
4643 /* If the padding and mode of the type is such that a copy into
4644 a register would put it into the wrong part of the register. */
4645 if (mode == BLKmode
4646 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4647 && (FUNCTION_ARG_PADDING (mode, type)
4648 == (BYTES_BIG_ENDIAN ? upward : downward)))
4649 return true;
4651 return false;
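/* Illustrative sketch (hypothetical target): on a big-endian machine with
   a 32-bit PARM_BOUNDARY, a 6-byte BLKmode structure that the target pads
   upward satisfies the last test above, so it must be passed on the
   stack; a register copy would be padded at the opposite end.  */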