/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
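/* For example, with PREFERRED_STACK_BOUNDARY of 128 bits and BITS_PER_UNIT
   of 8, STACK_BYTES evaluates to 16-byte stack slots.  */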
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been stored
   into the stack.  This bitmap is used to prevent sibling call optimization
   if the function tries to use its parent's incoming argument slots when
   they have already been overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
static int calls_function (tree, int);
static int calls_function_1 (tree, int);

static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      int);
static tree fix_unsafe_tree (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need to return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (tree exp, int which)
{
  int val;

  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}
/* Recursive function to do the work of above function.  */

static int
calls_function_1 (tree exp, int which)
{
  int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
        return 1;
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                == FUNCTION_TYPE)
               && (TYPE_RETURNS_STACK_DEPRESSED
                   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == FUNCTION_DECL)
               && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                       0)
                   & ECF_MAY_BE_ALLOCA))
        return 1;

      break;

    case CONSTRUCTOR:
      {
        tree tem;

        for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
          if (calls_function_1 (TREE_VALUE (tem), which))
            return 1;
      }

      return 0;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return 0;
      if (value_member (exp, calls_function_save_exprs))
        return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
                                             calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
              && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
        tree local;
        tree subblock;

        for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
          if (DECL_INITIAL (local) != 0
              && calls_function_1 (DECL_INITIAL (local), which))
            return 1;

        for (subblock = BLOCK_SUBBLOCKS (exp);
             subblock;
             subblock = TREE_CHAIN (subblock))
          if (calls_function_1 (subblock, which))
            return 1;
      }
      return 0;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
        if (calls_function_1 (TREE_VALUE (exp), which))
          return 1;
      return 0;

    default:
      break;
    }

  /* Only expressions and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'b')
    return 0;

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
        && calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
                      int reg_parm_seen, int sibcallp)
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
        if (fndecl != current_function_decl)
#endif
          funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif
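  /* For example, on i386 a function declared with the `stdcall' attribute
     pops its own arguments on return; RETURN_POPS_ARGS then yields the full
     argument size, N_POPPED above becomes nonzero, and no separate stack
     adjustment is emitted after the call.  */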
  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    abort ();
  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
        (VOIDmode,
         gen_rtx_USE (VOIDmode,
                      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
         call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    note_eh_region_may_contain_throw ();

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  /* Don't bother cleaning up after a noreturn function.  */
  if (ecf_flags & (ECF_NORETURN | ECF_LONGJMP))
    return;
  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & ECF_SP_DEPRESSED)
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly, set LONGJMP if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */
static int
special_function_p (tree fndecl, int flags)
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }
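      /* For example, a declaration of `__xsetjmp' leaves TNAME pointing at
         "setjmp", and `_longjmp' leaves it pointing at "longjmp", so the
         comparisons below match the unprefixed names.  */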
      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_LONGJMP;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
                && ! strcmp (tname, "fork"))
               /* Linux specific: __clone.  Check NAME to insist on the
                  leading underscores, to avoid polluting the ISO / POSIX
                  namespace.  */
               || (name[0] == '_' && name[1] == '_'
                   && ! strcmp (tname, "clone"))
               || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
                   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
                   && (tname[5] == '\0'
                       || ((tname[5] == 'p' || tname[5] == 'e')
                           && tname[6] == '\0'))))
        flags |= ECF_FORK_OR_EXEC;
    }
  return flags;
}
/* Return nonzero when FNDECL represents a call to setjmp or another
   function that may return more than once.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
/* Return true when EXP represents a call to `alloca'.  */

bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                              0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
      type = TREE_TYPE (exp);

      if (i)
        {
          if (i->pure_function)
            flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
          if (i->const_function)
            flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
        }

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
        flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_LIBCALL_BLOCK;
    }

  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();

            /* ANSI doesn't require a sequence point here,
               but PCC has one, so this will avoid some problems.  */
            emit_queue ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        if ((! (GET_CODE (args[i].value) == REG
                || (GET_CODE (args[i].value) == SUBREG
                    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
            && args[i].mode != BLKmode
            && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
            && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                || preserve_subexpressions_p ()))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock,
                          int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}
static void
restore_fixed_argument_area (rtx save_area, rtx argblock,
                             int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
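        /* E.g., a 10-byte structure on a target with 4-byte words needs
           (10 + 3) / 4 = 3 word-sized pseudos.  */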
        int endian_correction = 0;

        args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
        args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
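        /* For instance, a 3-byte struct on a 32-bit big-endian target gets
           endian_correction = 32 - 24 = 8, so the bits land in the most
           significant bytes of the word.  */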
        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, BITS_PER_WORD);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word, BITS_PER_WORD);
          }
      }
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */
static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree actparms, tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;

      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }
  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many words are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
          || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
          || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
                                             type, argpos < n_named_args)
#endif
          )
        {
          /* If we're compiling a thunk, pass through invisible
             references instead of making a copy.  */
          if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
              || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
                                              type, argpos < n_named_args)
                  /* If it's in a register, we must make a copy of it too.  */
                  /* ??? Is this a sufficient test?  Is there a better one? */
                  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
                       && REG_P (DECL_RTL (args[i].tree_value)))
                  && ! TREE_ADDRESSABLE (type))
#endif
              )
            {
              /* C++ uses a TARGET_EXPR to indicate that we want to make a
                 new object from the argument.  If we are passing by
                 invisible reference, the callee will do that for us, so we
                 can strip off the TARGET_EXPR.  This is not always safe,
                 but it is safe in the only case where this is a useful
                 optimization; namely, when the argument is a plain object.
                 In that case, the frontend is just asking the backend to
                 make a bitwise copy of the argument.  */

              if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
                  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
                args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
            {
              /* In the V3 C++ ABI, parameters are destroyed in the caller.
                 We implement this by passing the address of the temporary
                 rather than expanding it into another allocated slot.  */
              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0);
              *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           make_tree (type, copy));
              type = build_pointer_type (type);
            }
        }
      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
        args[i].partial
          = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
                                        argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
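      /* A hypothetical example of such a value, with the leading null entry
         requesting the extra stack copy and one register chunk at offset 0:
             (parallel [(expr_list (nil) (const_int 0))
                        (expr_list (reg:SI 4) (const_int 0))])  */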
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;
      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          if (stack_pointer_delta & (preferred_stack_boundary - 1))
            abort ();
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          args_size->var
            = size_binop (MINUS_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));
#endif
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);
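      /* Worked example: with args_size->constant = 20, stack_pointer_delta
         = 4 and a 16-byte boundary, this computes ((20 + 4 + 15) / 16) * 16
         - 4 = 32 - 4 = 28, so the pushed args end on a 16-byte boundary once
         the existing stack offset is taken into account.  */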
      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
        args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code)  */

  for (i = 0; i < num_actuals; i++)
    if ((flags & ECF_LIBCALL_BLOCK)
        || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
        enum machine_mode mode;

        /* If this is an addressable type, we cannot pre-evaluate it.  */
        if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
          abort ();

        args[i].value
          = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

        /* ANSI doesn't require a sequence point here,
           but PCC has one, so this will avoid some problems.  */
        emit_queue ();

        args[i].initial_value = args[i].value
          = protect_from_queue (args[i].value, 0);

        mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
        if (mode != args[i].mode)
          {
            args[i].value
              = convert_modes (args[i].mode, mode,
                               args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
            /* CSE will replace this only if it contains args[i].value
               pseudo, so convert it down to the declared mode using
               a SUBREG.  */
            if (GET_CODE (args[i].value) == REG
                && GET_MODE_CLASS (args[i].mode) == MODE_INT)
              {
                args[i].initial_value
                  = gen_lowpart_SUBREG (mode, args[i].value);
                SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
                SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                              args[i].unsignedp);
              }
#endif
          }
      }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock,
                            int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack && args[i].reg != 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack = gen_rtx_MEM (args[i].mode, addr);
          set_mem_align (args[i].stack, PARM_BOUNDARY);
          set_mem_attributes (args[i].stack,
                              TREE_TYPE (args[i].tree_value), 1);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
          set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
          set_mem_attributes (args[i].stack_slot,
                              TREE_TYPE (args[i].tree_value), 1);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();        /* FUNEXP can't be BLKmode.  */
      emit_queue ();
    }
  return funexp;
}
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set to non-negative if we must move a word at a time, even if
             just one word (e.g., partial == 1 && mode == DFmode).  Set to
             -1 if we just use a normal move insn.  This value can be zero
             if the argument is a zero size structure with no fields.  */
          nregs = -1;
          if (partial)
            nregs = partial;
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            {
              tree type = TREE_TYPE (args[i].tree_value);
              emit_group_load (reg, args[i].value, type,
                               int_size_in_bytes (type));
            }

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  eg. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
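                  /* E.g., for a one-byte value on a target with 4-byte
                     words, shift = (4 - 1) * 8 = 24 bits, moving the byte
                     into the most significant position.  */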
                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_binop (word_mode, ashl_optab, reg,
                                    GEN_INT (shift), reg, 1, OPTAB_WIDEN);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }
          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

#ifdef BLOCK_REG_PADDING
              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
                  && args[i].locate.where_pad == downward)
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;

                  emit_move_insn (x, tem);
                  x = expand_binop (word_mode, dir, x, GEN_INT (shift),
                                    ri, 1, OPTAB_WIDEN);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
#endif
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else
            use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
        }
    }
}
1712 /* Try to integrate function. See expand_inline_function for documentation
1713 about the parameters. */
1715 static rtx
1716 try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
1717 tree type, rtx structure_value_addr)
1719 rtx temp;
1720 rtx before_call;
1721 int i;
1722 rtx old_stack_level = 0;
1723 int reg_parm_stack_space = 0;
1725 #ifdef REG_PARM_STACK_SPACE
1726 #ifdef MAYBE_REG_PARM_STACK_SPACE
1727 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1728 #else
1729 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1730 #endif
1731 #endif
1733 before_call = get_last_insn ();
1735 timevar_push (TV_INTEGRATION);
1737 temp = expand_inline_function (fndecl, actparms, target,
1738 ignore, type,
1739 structure_value_addr);
1741 timevar_pop (TV_INTEGRATION);
1743 /* If inlining succeeded, return. */
1744 if (temp != (rtx) (size_t) -1)
1746 if (ACCUMULATE_OUTGOING_ARGS)
1748 /* If the outgoing argument list must be preserved, push
1749 the stack before executing the inlined function if it
1750 makes any calls. */
1752 i = reg_parm_stack_space;
1753 if (i > highest_outgoing_arg_in_use)
1754 i = highest_outgoing_arg_in_use;
1755 while (--i >= 0 && stack_usage_map[i] == 0)
1756 ;
1758 if (stack_arg_under_construction || i >= 0)
1760 rtx first_insn
1761 = before_call ? NEXT_INSN (before_call) : get_insns ();
1762 rtx insn = NULL_RTX, seq;
1764 /* Look for a call in the inline function code.
1765 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1766 nonzero then there is a call and it is not necessary
1767 to scan the insns. */
1769 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1770 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1771 if (GET_CODE (insn) == CALL_INSN)
1772 break;
1774 if (insn)
1776 /* Reserve enough stack space so that the largest
1777 argument list of any function call in the inline
1778 function does not overlap the argument list being
1779 evaluated. This is usually an overestimate because
1780 allocate_dynamic_stack_space reserves space for an
1781 outgoing argument list in addition to the requested
1782 space, but there is no way to ask for stack space such
1783 that an argument list of a certain length can be
1784 safely constructed.
1786 Add the stack space reserved for register arguments, if
1787 any, in the inline function. What is really needed is the
1788 largest value of reg_parm_stack_space in the inline
1789 function, but that is not available. Using the current
1790 value of reg_parm_stack_space is wrong, but gives
1791 correct results on all supported machines. */
1793 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1794 + reg_parm_stack_space);
1796 start_sequence ();
1797 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1798 allocate_dynamic_stack_space (GEN_INT (adjust),
1799 NULL_RTX, BITS_PER_UNIT);
1800 seq = get_insns ();
1801 end_sequence ();
1802 emit_insn_before (seq, first_insn);
1803 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1808 /* If the result is equivalent to TARGET, return TARGET to simplify
1809 checks in store_expr. They can be equivalent but not equal in the
1810 case of a function that returns BLKmode. */
1811 if (temp != target && rtx_equal_p (temp, target))
1812 return target;
1813 return temp;
1816 /* If inlining failed, mark FNDECL as needing to be compiled
1817 separately after all. If the function was declared inline,
1818 give a warning. */
1819 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1820 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1822 warning ("%Hinlining failed in call to '%F'",
1823 &DECL_SOURCE_LOCATION (fndecl), fndecl);
1824 warning ("called from here");
1826 (*lang_hooks.mark_addressable) (fndecl);
1827 return (rtx) (size_t) -1;
1830 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1831 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1832 bytes, then we would need to push some additional bytes to pad the
1833 arguments. So, we compute an adjustment to the stack pointer for an
1834 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1835 bytes. Then, when the arguments are pushed the stack will be perfectly
1836 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1837 be popped after the call. Returns the adjustment. */
1839 static int
1840 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1841 struct args_size *args_size,
1842 int preferred_unit_stack_boundary)
1844 /* The number of bytes to pop so that the stack will be
1845 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1846 HOST_WIDE_INT adjustment;
1847 /* The alignment of the stack after the arguments are pushed, if we
1848 just pushed the arguments without adjusting the stack here. */
1849 HOST_WIDE_INT unadjusted_alignment;
1851 unadjusted_alignment
1852 = ((stack_pointer_delta + unadjusted_args_size)
1853 % preferred_unit_stack_boundary);
1855 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1856 as possible -- leaving just enough left to cancel out the
1857 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1858 PENDING_STACK_ADJUST is non-negative, and congruent to
1859 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1861 /* Begin by trying to pop all the bytes. */
1862 unadjusted_alignment
1863 = (unadjusted_alignment
1864 - (pending_stack_adjust % preferred_unit_stack_boundary));
1865 adjustment = pending_stack_adjust;
1866 /* Push enough additional bytes that the stack will be aligned
1867 after the arguments are pushed. */
1868 if (preferred_unit_stack_boundary > 1)
1870 if (unadjusted_alignment > 0)
1871 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1872 else
1873 adjustment += unadjusted_alignment;
1876 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1877 bytes after the call. The right number is the entire
1878 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1879 by the arguments in the first place. */
1880 args_size->constant
1881 = pending_stack_adjust - adjustment + unadjusted_args_size;
1883 return adjustment;
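/* A worked example with assumed numbers: let
   preferred_unit_stack_boundary == 16, stack_pointer_delta == 12,
   unadjusted_args_size == 8 and pending_stack_adjust == 20.  Then
   unadjusted_alignment == (12 + 8) % 16 == 4, which after
   subtracting 20 % 16 == 4 becomes 0, so ADJUSTMENT stays at 20:
   all 20 pending bytes are popped now, and (12 - 20 + 8) % 16 == 0,
   i.e. the stack is aligned once the 8 bytes of arguments have been
   pushed.  ARGS_SIZE->CONSTANT becomes 20 - 20 + 8 == 8, the number
   of bytes to pop after the call.  */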
1886 /* Scan expression X to see whether it dereferences any argument slots
1887 we have already clobbered with tail call arguments (as noted in the
1888 stored_args_map bitmap).
1889 Return nonzero if X dereferences such argument slots,
1890 zero otherwise. */
1892 static int
1893 check_sibcall_argument_overlap_1 (rtx x)
1895 RTX_CODE code;
1896 int i, j;
1897 unsigned int k;
1898 const char *fmt;
1900 if (x == NULL_RTX)
1901 return 0;
1903 code = GET_CODE (x);
1905 if (code == MEM)
1907 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1908 i = 0;
1909 else if (GET_CODE (XEXP (x, 0)) == PLUS
1910 && XEXP (XEXP (x, 0), 0) ==
1911 current_function_internal_arg_pointer
1912 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1913 i = INTVAL (XEXP (XEXP (x, 0), 1));
1914 else
1915 return 0;
1917 #ifdef ARGS_GROW_DOWNWARD
1918 i = -i - GET_MODE_SIZE (GET_MODE (x));
1919 #endif
1921 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1922 if (i + k < stored_args_map->n_bits
1923 && TEST_BIT (stored_args_map, i + k))
1924 return 1;
1926 return 0;
1929 /* Scan all subexpressions. */
1930 fmt = GET_RTX_FORMAT (code);
1931 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1933 if (*fmt == 'e')
1935 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1936 return 1;
1938 else if (*fmt == 'E')
1940 for (j = 0; j < XVECLEN (x, i); j++)
1941 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1942 return 1;
1945 return 0;
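/* For example (a hedged illustration): a reference such as
   (mem:SI (plus (reg arg_pointer) (const_int 8))) yields I == 8
   (or, with ARGS_GROW_DOWNWARD, I == -8 - 4 == -12), and the loop
   above then tests one stored_args_map bit per byte of the slot
   being read, here bits 8..11.  */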
1948 /* Scan the sequence after INSN to see whether it dereferences any argument
1949 slots we have already clobbered with tail call arguments (as noted in
1950 the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is nonzero, add
1951 the stack slots for ARG to the stored_args_map bitmap afterwards (when
1952 ARG is a register, MARK_STORED_ARGS_MAP should be 0). Return nonzero
1953 if the sequence after INSN dereferences such argument slots, zero otherwise. */
1955 static int
1956 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1958 int low, high;
1960 if (insn == NULL_RTX)
1961 insn = get_insns ();
1962 else
1963 insn = NEXT_INSN (insn);
1965 for (; insn; insn = NEXT_INSN (insn))
1966 if (INSN_P (insn)
1967 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1968 break;
1970 if (mark_stored_args_map)
1972 #ifdef ARGS_GROW_DOWNWARD
1973 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1974 #else
1975 low = arg->locate.slot_offset.constant;
1976 #endif
1978 for (high = low + arg->locate.size.constant; low < high; low++)
1979 SET_BIT (stored_args_map, low);
1981 return insn != NULL_RTX;
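/* E.g. (assumed numbers): an argument with locate.slot_offset.constant
   == 16 and locate.size.constant == 8 sets bits 16..23 of
   stored_args_map, recording that those bytes of the incoming
   argument area now hold outgoing tail-call data.  */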
1984 static tree
1985 fix_unsafe_tree (tree t)
1987 switch (unsafe_for_reeval (t))
1989 case 0: /* Safe. */
1990 break;
1992 case 1: /* Mildly unsafe. */
1993 t = unsave_expr (t);
1994 break;
1996 case 2: /* Wildly unsafe. */
1998 tree var = build_decl (VAR_DECL, NULL_TREE,
1999 TREE_TYPE (t));
2000 SET_DECL_RTL (var,
2001 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
2002 t = var;
2004 break;
2006 default:
2007 abort ();
2009 return t;
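/* A hedged illustration: an argument whose evaluation has side
   effects, say `i++', cannot simply be re-evaluated for each of the
   call sequences built below; in the worst ("wildly unsafe") case it
   is expanded exactly once here and the resulting rtx is remembered
   in a temporary VAR_DECL that all the sequences then share.  */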
2012 /* Generate all the code for a function call
2013 and return an rtx for its value.
2014 Store the value in TARGET (specified as an rtx) if convenient.
2015 If the value is stored in TARGET then TARGET is returned.
2016 If IGNORE is nonzero, then we ignore the value of the function call. */
2018 rtx
2019 expand_call (tree exp, rtx target, int ignore)
2021 /* Nonzero if we are currently expanding a call. */
2022 static int currently_expanding_call = 0;
2024 /* List of actual parameters. */
2025 tree actparms = TREE_OPERAND (exp, 1);
2026 /* RTX for the function to be called. */
2027 rtx funexp;
2028 /* Sequence of insns to perform a tail recursive "call". */
2029 rtx tail_recursion_insns = NULL_RTX;
2030 /* Sequence of insns to perform a normal "call". */
2031 rtx normal_call_insns = NULL_RTX;
2032 /* Sequence of insns to perform a tail recursive "call". */
2033 rtx tail_call_insns = NULL_RTX;
2034 /* Data type of the function. */
2035 tree funtype;
2036 tree type_arg_types;
2037 /* Declaration of the function being called,
2038 or 0 if the function is computed (not known by name). */
2039 tree fndecl = 0;
2040 rtx insn;
2041 int try_tail_call = 1;
2042 int try_tail_recursion = 1;
2043 int pass;
2045 /* Register in which non-BLKmode value will be returned,
2046 or 0 if no value or if value is BLKmode. */
2047 rtx valreg;
2048 /* Address where we should return a BLKmode value;
2049 0 if value not BLKmode. */
2050 rtx structure_value_addr = 0;
2051 /* Nonzero if that address is being passed by treating it as
2052 an extra, implicit first parameter. Otherwise,
2053 it is passed by being copied directly into struct_value_rtx. */
2054 int structure_value_addr_parm = 0;
2055 /* Size of aggregate value wanted, or zero if none wanted
2056 or if we are using the non-reentrant PCC calling convention
2057 or expecting the value in registers. */
2058 HOST_WIDE_INT struct_value_size = 0;
2059 /* Nonzero if called function returns an aggregate in memory PCC style,
2060 by returning the address of where to find it. */
2061 int pcc_struct_value = 0;
2062 rtx struct_value = 0;
2064 /* Number of actual parameters in this call, including struct value addr. */
2065 int num_actuals;
2066 /* Number of named args. Args after this are anonymous ones
2067 and they must all go on the stack. */
2068 int n_named_args;
2070 /* Vector of information about each argument.
2071 Arguments are numbered in the order they will be pushed,
2072 not the order they are written. */
2073 struct arg_data *args;
2075 /* Total size in bytes of all the stack-parms scanned so far. */
2076 struct args_size args_size;
2077 struct args_size adjusted_args_size;
2078 /* Size of arguments before any adjustments (such as rounding). */
2079 int unadjusted_args_size;
2080 /* Data on reg parms scanned so far. */
2081 CUMULATIVE_ARGS args_so_far;
2082 /* Nonzero if a reg parm has been scanned. */
2083 int reg_parm_seen;
2084 /* Nonzero if this is an indirect function call. */
2086 /* Nonzero if we must avoid push-insns in the args for this call.
2087 If stack space is allocated for register parameters, but not by the
2088 caller, then it is preallocated in the fixed part of the stack frame.
2089 So the entire argument block must then be preallocated (i.e., we
2090 ignore PUSH_ROUNDING in that case). */
2092 int must_preallocate = !PUSH_ARGS;
2094 /* Size of the stack reserved for parameter registers. */
2095 int reg_parm_stack_space = 0;
2097 /* Address of space preallocated for stack parms
2098 (on machines that lack push insns), or 0 if space not preallocated. */
2099 rtx argblock = 0;
2101 /* Mask of ECF_ flags. */
2102 int flags = 0;
2103 /* Nonzero if this is a call to an inline function. */
2104 int is_integrable = 0;
2105 #ifdef REG_PARM_STACK_SPACE
2106 /* Define the boundary of the register parm stack space that needs to be
2107 saved, if any. */
2108 int low_to_save, high_to_save;
2109 rtx save_area = 0; /* Place that it is saved */
2110 #endif
2112 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2113 char *initial_stack_usage_map = stack_usage_map;
2115 int old_stack_allocated;
2117 /* State variables to track stack modifications. */
2118 rtx old_stack_level = 0;
2119 int old_stack_arg_under_construction = 0;
2120 int old_pending_adj = 0;
2121 int old_inhibit_defer_pop = inhibit_defer_pop;
2123 /* Some stack pointer alterations we make are performed via
2124 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2125 which we then also need to save/restore along the way. */
2126 int old_stack_pointer_delta = 0;
2128 rtx call_fusage;
2129 tree p = TREE_OPERAND (exp, 0);
2130 tree addr = TREE_OPERAND (exp, 0);
2131 int i;
2132 /* The alignment of the stack, in bits. */
2133 HOST_WIDE_INT preferred_stack_boundary;
2134 /* The alignment of the stack, in bytes. */
2135 HOST_WIDE_INT preferred_unit_stack_boundary;
2137 /* See if this is "nothrow" function call. */
2138 if (TREE_NOTHROW (exp))
2139 flags |= ECF_NOTHROW;
2141 /* See if we can find a DECL-node for the actual function.
2142 As a result, decide whether this is a call to an integrable function. */
2144 fndecl = get_callee_fndecl (exp);
2145 if (fndecl)
2147 if (!flag_no_inline
2148 && fndecl != current_function_decl
2149 && DECL_INLINE (fndecl)
2150 && DECL_SAVED_INSNS (fndecl)
2151 && DECL_SAVED_INSNS (fndecl)->inlinable)
2152 is_integrable = 1;
2153 else if (! TREE_ADDRESSABLE (fndecl))
2155 /* In case this function later becomes inlinable,
2156 record that there was already a non-inline call to it.
2158 Use abstraction instead of setting TREE_ADDRESSABLE
2159 directly. */
2160 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2161 && optimize > 0)
2163 warning ("%Hcan't inline call to '%F'",
2164 &DECL_SOURCE_LOCATION (fndecl), fndecl);
2165 warning ("called from here");
2167 (*lang_hooks.mark_addressable) (fndecl);
2170 flags |= flags_from_decl_or_type (fndecl);
2173 /* If we don't have a specific function to call, see if any
2174 attributes are set in the type. */
2175 else
2176 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2178 struct_value = targetm.calls.struct_value_rtx (fndecl ? TREE_TYPE (fndecl) : 0, 0);
2180 /* Warn if this value is an aggregate type,
2181 regardless of which calling convention we are using for it. */
2182 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2183 warning ("function call has aggregate value");
2185 /* If the result of a pure or const function call is ignored (or void),
2186 and none of its arguments are volatile, we can avoid expanding the
2187 call and just evaluate the arguments for side-effects. */
2188 if ((flags & (ECF_CONST | ECF_PURE))
2189 && (ignore || target == const0_rtx
2190 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2192 bool volatilep = false;
2193 tree arg;
2195 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2196 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2198 volatilep = true;
2199 break;
2202 if (! volatilep)
2204 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2205 expand_expr (TREE_VALUE (arg), const0_rtx,
2206 VOIDmode, EXPAND_NORMAL);
2207 return const0_rtx;
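/* An illustrative example (assumed declaration): given
     extern int sq (int) __attribute__ ((const));
   a statement `sq (x);' whose result is unused emits no call at
   all; X alone is evaluated, and then only for its side effects.  */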
2211 #ifdef REG_PARM_STACK_SPACE
2212 #ifdef MAYBE_REG_PARM_STACK_SPACE
2213 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2214 #else
2215 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2216 #endif
2217 #endif
2219 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2220 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2221 must_preallocate = 1;
2222 #endif
2224 /* Set up a place to return a structure. */
2226 /* Cater to broken compilers. */
2227 if (aggregate_value_p (exp, fndecl))
2229 /* This call returns a big structure. */
2230 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2232 #ifdef PCC_STATIC_STRUCT_RETURN
2234 pcc_struct_value = 1;
2235 /* Easier than making that case work right. */
2236 if (is_integrable)
2238 /* In case this is a static function, note that it has been
2239 used. */
2240 if (! TREE_ADDRESSABLE (fndecl))
2241 (*lang_hooks.mark_addressable) (fndecl);
2242 is_integrable = 0;
2245 #else /* not PCC_STATIC_STRUCT_RETURN */
2247 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2249 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2251 /* The structure value address arg is already in actparms.
2252 Pull it out. It might be nice to just leave it there, but
2253 we need to set structure_value_addr. */
2254 tree return_arg = TREE_VALUE (actparms);
2255 actparms = TREE_CHAIN (actparms);
2256 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2257 VOIDmode, EXPAND_NORMAL);
2259 else if (target && GET_CODE (target) == MEM)
2260 structure_value_addr = XEXP (target, 0);
2261 else
2263 /* For variable-sized objects, we must be called with a target
2264 specified. If we were to allocate space on the stack here,
2265 we would have no way of knowing when to free it. */
2266 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2268 mark_temp_addr_taken (d);
2269 structure_value_addr = XEXP (d, 0);
2270 target = 0;
2273 #endif /* not PCC_STATIC_STRUCT_RETURN */
2276 /* If called function is inline, try to integrate it. */
2278 if (is_integrable)
2280 rtx temp = try_to_integrate (fndecl, actparms, target,
2281 ignore, TREE_TYPE (exp),
2282 structure_value_addr);
2283 if (temp != (rtx) (size_t) -1)
2284 return temp;
2287 /* Figure out the amount to which the stack should be aligned. */
2288 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2289 if (fndecl)
2291 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2292 if (i && i->preferred_incoming_stack_boundary)
2293 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2296 /* Operand 0 is a pointer-to-function; get the type of the function. */
2297 funtype = TREE_TYPE (addr);
2298 if (! POINTER_TYPE_P (funtype))
2299 abort ();
2300 funtype = TREE_TYPE (funtype);
2302 /* Munge the tree to split complex arguments into their imaginary
2303 and real parts. */
2304 if (SPLIT_COMPLEX_ARGS)
2306 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2307 actparms = split_complex_values (actparms);
2309 else
2310 type_arg_types = TYPE_ARG_TYPES (funtype);
2312 /* See if this is a call to a function that can return more than once
2313 or a call to longjmp or malloc. */
2314 flags |= special_function_p (fndecl, flags);
2316 if (flags & ECF_MAY_BE_ALLOCA)
2317 current_function_calls_alloca = 1;
2319 /* If struct_value_rtx is 0, it means pass the address
2320 as if it were an extra parameter. */
2321 if (structure_value_addr && struct_value == 0)
2323 /* If structure_value_addr is a REG other than
2324 virtual_outgoing_args_rtx, we can always use it. If it
2325 is not a REG, we must always copy it into a register.
2326 If it is virtual_outgoing_args_rtx, we must copy it to another
2327 register in some cases. */
2328 rtx temp = (GET_CODE (structure_value_addr) != REG
2329 || (ACCUMULATE_OUTGOING_ARGS
2330 && stack_arg_under_construction
2331 && structure_value_addr == virtual_outgoing_args_rtx)
2332 ? copy_addr_to_reg (structure_value_addr)
2333 : structure_value_addr);
2335 actparms
2336 = tree_cons (error_mark_node,
2337 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2338 temp),
2339 actparms);
2340 structure_value_addr_parm = 1;
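/* An illustrative sketch: on a target whose struct_value_rtx is 0,
     struct S s = f (a);
   is in effect expanded as f (&s, a) -- the address of the return
   slot is prepended to ACTPARMS as a hidden first argument.  */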
2343 /* Count the arguments and set NUM_ACTUALS. */
2344 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2345 num_actuals++;
2347 /* Start updating where the next arg would go.
2349 On some machines (such as the PA) indirect calls have a different
2350 calling convention than normal calls. The last argument in
2351 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2352 or not. */
2353 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
2355 /* Compute number of named args.
2356 Normally, don't include the last named arg if anonymous args follow.
2357 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2358 (If no anonymous args follow, the result of list_length is actually
2359 one too large. This is harmless.)
2361 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2362 zero, this machine will be able to place unnamed args that were
2363 passed in registers into the stack. So treat all args as named.
2364 This allows the insns emitted for a specific argument list to be
2365 independent of the function declaration.
2367 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2368 reliable way to pass unnamed args in registers, so we must force
2369 them into memory. */
2371 if ((targetm.calls.strict_argument_naming (&args_so_far)
2372 || ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2373 && type_arg_types != 0)
2374 n_named_args
2375 = (list_length (type_arg_types)
2376 /* Don't include the last named arg. */
2377 - (targetm.calls.strict_argument_naming (&args_so_far) ? 0 : 1)
2378 /* Count the struct value address, if it is passed as a parm. */
2379 + structure_value_addr_parm);
2380 else
2381 /* If we know nothing, treat all args as named. */
2382 n_named_args = num_actuals;
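/* A hedged example: for `void f (int a, int b, ...)' TYPE_ARG_TYPES
   lists just the two named types, so n_named_args is 2 under strict
   argument naming and 1 otherwise (the last named arg dropped); for
   a non-varargs prototype the trailing void_type_node makes
   list_length one too large, which, as noted above, is harmless.  */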
2384 /* Make a vector to hold all the information about each arg. */
2385 args = alloca (num_actuals * sizeof (struct arg_data));
2386 memset (args, 0, num_actuals * sizeof (struct arg_data));
2388 /* Build up entries in the ARGS array, compute the size of the
2389 arguments into ARGS_SIZE, etc. */
2390 initialize_argument_information (num_actuals, args, &args_size,
2391 n_named_args, actparms, fndecl,
2392 &args_so_far, reg_parm_stack_space,
2393 &old_stack_level, &old_pending_adj,
2394 &must_preallocate, &flags);
2396 if (args_size.var)
2398 /* If this function requires a variable-sized argument list, don't
2399 try to make a cse'able block for this call. We may be able to
2400 do this eventually, but it is too complicated to keep track of
2401 what insns go in the cse'able block and which don't. */
2403 flags &= ~ECF_LIBCALL_BLOCK;
2404 must_preallocate = 1;
2407 /* Now make final decision about preallocating stack space. */
2408 must_preallocate = finalize_must_preallocate (must_preallocate,
2409 num_actuals, args,
2410 &args_size);
2412 /* If the structure value address will reference the stack pointer, we
2413 must stabilize it. We don't need to do this if we know that we are
2414 not going to adjust the stack pointer in processing this call. */
2416 if (structure_value_addr
2417 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2418 || reg_mentioned_p (virtual_outgoing_args_rtx,
2419 structure_value_addr))
2420 && (args_size.var
2421 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2422 structure_value_addr = copy_to_reg (structure_value_addr);
2424 /* Tail calls can make things harder to debug, and we've traditionally
2425 pushed these optimizations into -O2. Don't try if we're already
2426 expanding a call, as that means we're an argument. Don't try if
2427 there are cleanups, as we know there's code to follow the call.
2429 If rtx_equal_function_value_matters is false, that means we've
2430 finished with regular parsing. Which means that some of the
2431 machinery we use to generate tail-calls is no longer in place.
2432 This is most often true of sjlj-exceptions, which we couldn't
2433 tail-call to anyway. */
2435 if (currently_expanding_call++ != 0
2436 || !flag_optimize_sibling_calls
2437 || !rtx_equal_function_value_matters
2438 || any_pending_cleanups ()
2439 || args_size.var)
2440 try_tail_call = try_tail_recursion = 0;
2442 /* Tail recursion fails when this is not a recursive call. */
2443 if (!try_tail_recursion
2444 || TREE_CODE (addr) != ADDR_EXPR
2445 || TREE_OPERAND (addr, 0) != current_function_decl)
2446 try_tail_recursion = 0;
2448 /* Other reasons for tail call optimization to fail. */
2449 if (
2450 #ifdef HAVE_sibcall_epilogue
2451 !HAVE_sibcall_epilogue
2452 #else
2453 1
2454 #endif
2455 || !try_tail_call
2456 /* Doing sibling call optimization needs some work, since
2457 structure_value_addr can be allocated on the stack.
2458 It does not seem worth the effort since few optimizable
2459 sibling calls will return a structure. */
2460 || structure_value_addr != NULL_RTX
2461 /* Check whether the target is able to optimize the call
2462 into a sibcall. */
2463 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2464 /* Functions that do not return exactly once may not be sibcall
2465 optimized. */
2466 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2467 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2468 /* If the called function is nested in the current one, it might access
2469 some of the caller's arguments, but could clobber them beforehand if
2470 the argument areas are shared. */
2471 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2472 /* If this function requires more stack slots than the current
2473 function, we cannot change it into a sibling call. */
2474 || args_size.constant > current_function_args_size
2475 /* If the callee pops its own arguments, then it must pop exactly
2476 the same number of arguments as the current function. */
2477 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2478 != RETURN_POPS_ARGS (current_function_decl,
2479 TREE_TYPE (current_function_decl),
2480 current_function_args_size))
2481 || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
2482 try_tail_call = 0;
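/* One concrete (hedged) illustration: with
     struct big { char c[64]; };
     struct big g (void);
     struct big f (void) { return g (); }
   the call to g is rejected above on targets that return `struct
   big' in memory, because structure_value_addr is then non-null.  */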
2484 if (try_tail_call || try_tail_recursion)
2486 int end, inc;
2487 actparms = NULL_TREE;
2488 /* Ok, we're going to give the tail call the old college try.
2489 This means we're going to evaluate the function arguments
2490 up to three times. There are two degrees of badness we can
2491 encounter, those that can be unsaved and those that can't.
2492 (See unsafe_for_reeval commentary for details.)
2494 Generate a new argument list. Pass safe arguments through
2495 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2496 For hard badness, evaluate them now and put their resulting
2497 rtx in a temporary VAR_DECL.
2499 initialize_argument_information has ordered the array for the
2500 order to be pushed, and we must remember this when reconstructing
2501 the original argument order. */
2503 if (PUSH_ARGS_REVERSED)
2505 inc = 1;
2506 i = 0;
2507 end = num_actuals;
2509 else
2511 inc = -1;
2512 i = num_actuals - 1;
2513 end = -1;
2516 for (; i != end; i += inc)
2518 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2519 /* We need to build actparms for optimize_tail_recursion. We can
2520 safely trash away TREE_PURPOSE, since it is unused by this
2521 function. */
2522 if (try_tail_recursion)
2523 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2525 /* Do the same for the function address if it is an expression. */
2526 if (!fndecl)
2527 addr = fix_unsafe_tree (addr);
2528 /* Expanding one of those dangerous arguments could have added
2529 cleanups, but otherwise give it a whirl. */
2530 if (any_pending_cleanups ())
2531 try_tail_call = try_tail_recursion = 0;
2534 /* Generate a tail recursion sequence when calling ourselves. */
2536 if (try_tail_recursion)
2538 /* We want to emit any pending stack adjustments before the tail
2539 recursion "call". That way we know any adjustment after the tail
2540 recursion call can be ignored if we indeed use the tail recursion
2541 call expansion. */
2542 int save_pending_stack_adjust = pending_stack_adjust;
2543 int save_stack_pointer_delta = stack_pointer_delta;
2545 /* Emit any queued insns now; otherwise they would end up in
2546 only one of the alternates. */
2547 emit_queue ();
2549 /* Use a new sequence to hold any RTL we generate. We do not even
2550 know if we will use this RTL yet. The final decision cannot be
2551 made until after RTL generation for the entire function is
2552 complete. */
2553 start_sequence ();
2554 /* If expanding any of the arguments creates cleanups, we can't
2555 do a tailcall. So, we'll need to pop the pending cleanups
2556 list. If, however, all goes well, and there are no cleanups
2557 then the call to expand_start_target_temps will have no
2558 effect. */
2559 expand_start_target_temps ();
2560 if (optimize_tail_recursion (actparms, get_last_insn ()))
2562 if (any_pending_cleanups ())
2563 try_tail_call = try_tail_recursion = 0;
2564 else
2565 tail_recursion_insns = get_insns ();
2567 expand_end_target_temps ();
2568 end_sequence ();
2570 /* Restore the original pending stack adjustment for the sibling and
2571 normal call cases below. */
2572 pending_stack_adjust = save_pending_stack_adjust;
2573 stack_pointer_delta = save_stack_pointer_delta;
2576 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2578 /* A fork duplicates the profile information, and an exec discards
2579 it. We can't rely on fork/exec to be paired. So write out the
2580 profile information we have gathered so far, and clear it. */
2581 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2582 is subject to race conditions, just as with multithreaded
2583 programs. */
2585 emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
2588 /* Ensure current function's preferred stack boundary is at least
2589 what we need. We don't have to increase alignment for recursive
2590 functions. */
2591 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2592 && fndecl != current_function_decl)
2593 cfun->preferred_stack_boundary = preferred_stack_boundary;
2594 if (fndecl == current_function_decl)
2595 cfun->recursive_call_emit = true;
2597 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2599 function_call_count++;
2601 /* We want to make two insn chains; one for a sibling call, the other
2602 for a normal call. We will select one of the two chains after
2603 initial RTL generation is complete. */
2604 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2606 int sibcall_failure = 0;
2607 /* We want to emit any pending stack adjustments before the tail
2608 recursion "call". That way we know any adjustment after the tail
2609 recursion call can be ignored if we indeed use the tail recursion
2610 call expansion. */
2611 int save_pending_stack_adjust = 0;
2612 int save_stack_pointer_delta = 0;
2613 rtx insns;
2614 rtx before_call, next_arg_reg;
2616 if (pass == 0)
2618 /* Emit any queued insns now; otherwise they would end up in
2619 only one of the alternates. */
2620 emit_queue ();
2622 /* State variables we need to save and restore between
2623 iterations. */
2624 save_pending_stack_adjust = pending_stack_adjust;
2625 save_stack_pointer_delta = stack_pointer_delta;
2627 if (pass)
2628 flags &= ~ECF_SIBCALL;
2629 else
2630 flags |= ECF_SIBCALL;
2632 /* Other state variables that we must reinitialize each time
2633 through the loop (that are not initialized by the loop itself). */
2634 argblock = 0;
2635 call_fusage = 0;
2637 /* Start a new sequence for the normal call case.
2639 From this point on, if the sibling call fails, we want to set
2640 sibcall_failure instead of continuing the loop. */
2641 start_sequence ();
2643 if (pass == 0)
2645 /* We know at this point that there are not currently any
2646 pending cleanups. If, however, in the process of evaluating
2647 the arguments we were to create some, we'll need to be
2648 able to get rid of them. */
2649 expand_start_target_temps ();
2652 /* Don't let pending stack adjusts add up to too much.
2653 Also, do all pending adjustments now if there is any chance
2654 this might be a call to alloca or if we are expanding a sibling
2655 call sequence or if we are calling a function that is to return
2656 with stack pointer depressed. */
2657 if (pending_stack_adjust >= 32
2658 || (pending_stack_adjust > 0
2659 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2660 || pass == 0)
2661 do_pending_stack_adjust ();
2663 /* When calling a const function, we must pop the stack args right away,
2664 so that the pop is deleted or moved with the call. */
2665 if (pass && (flags & ECF_LIBCALL_BLOCK))
2666 NO_DEFER_POP;
2668 #ifdef FINAL_REG_PARM_STACK_SPACE
2669 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2670 args_size.var);
2671 #endif
2672 /* Precompute any arguments as needed. */
2673 if (pass)
2674 precompute_arguments (flags, num_actuals, args);
2676 /* Now we are about to start emitting insns that can be deleted
2677 if a libcall is deleted. */
2678 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2679 start_sequence ();
2681 adjusted_args_size = args_size;
2682 /* Compute the actual size of the argument block required. The variable
2683 and constant sizes must be combined, the size may have to be rounded,
2684 and there may be a minimum required size. When generating a sibcall
2685 pattern, do not round up, since we'll be re-using whatever space our
2686 caller provided. */
2687 unadjusted_args_size
2688 = compute_argument_block_size (reg_parm_stack_space,
2689 &adjusted_args_size,
2690 (pass == 0 ? 0
2691 : preferred_stack_boundary));
2693 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2695 /* The argument block when performing a sibling call is the
2696 incoming argument block. */
2697 if (pass == 0)
2699 argblock = virtual_incoming_args_rtx;
2700 argblock
2701 #ifdef STACK_GROWS_DOWNWARD
2702 = plus_constant (argblock, current_function_pretend_args_size);
2703 #else
2704 = plus_constant (argblock, -current_function_pretend_args_size);
2705 #endif
2706 stored_args_map = sbitmap_alloc (args_size.constant);
2707 sbitmap_zero (stored_args_map);
2710 /* If we have no actual push instructions, or shouldn't use them,
2711 make space for all args right now. */
2712 else if (adjusted_args_size.var != 0)
2714 if (old_stack_level == 0)
2716 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2717 old_stack_pointer_delta = stack_pointer_delta;
2718 old_pending_adj = pending_stack_adjust;
2719 pending_stack_adjust = 0;
2720 /* stack_arg_under_construction says whether a stack arg is
2721 being constructed at the old stack level. Pushing the stack
2722 gets a clean outgoing argument block. */
2723 old_stack_arg_under_construction = stack_arg_under_construction;
2724 stack_arg_under_construction = 0;
2726 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2728 else
2730 /* Note that we must go through the motions of allocating an argument
2731 block even if the size is zero because we may be storing args
2732 in the area reserved for register arguments, which may be part of
2733 the stack frame. */
2735 int needed = adjusted_args_size.constant;
2737 /* Store the maximum argument space used. It will be pushed by
2738 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2739 checking). */
2741 if (needed > current_function_outgoing_args_size)
2742 current_function_outgoing_args_size = needed;
2744 if (must_preallocate)
2746 if (ACCUMULATE_OUTGOING_ARGS)
2748 /* Since the stack pointer will never be pushed, it is
2749 possible for the evaluation of a parm to clobber
2750 something we have already written to the stack.
2751 Since most function calls on RISC machines do not use
2752 the stack, this is uncommon, but must work correctly.
2754 Therefore, we save any area of the stack that was already
2755 written and that we are using. Here we set up to do this
2756 by making a new stack usage map from the old one. The
2757 actual save will be done by store_one_arg.
2759 Another approach might be to try to reorder the argument
2760 evaluations to avoid this conflicting stack usage. */
2762 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2763 /* Since we will be writing into the entire argument area,
2764 the map must be allocated for its entire size, not just
2765 the part that is the responsibility of the caller. */
2766 needed += reg_parm_stack_space;
2767 #endif
2769 #ifdef ARGS_GROW_DOWNWARD
2770 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2771 needed + 1);
2772 #else
2773 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2774 needed);
2775 #endif
2776 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2778 if (initial_highest_arg_in_use)
2779 memcpy (stack_usage_map, initial_stack_usage_map,
2780 initial_highest_arg_in_use);
2782 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2783 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2784 (highest_outgoing_arg_in_use
2785 - initial_highest_arg_in_use));
2786 needed = 0;
2788 /* The address of the outgoing argument list must not be
2789 copied to a register here, because argblock would be left
2790 pointing to the wrong place after the call to
2791 allocate_dynamic_stack_space below. */
2793 argblock = virtual_outgoing_args_rtx;
2795 else
2797 if (inhibit_defer_pop == 0)
2799 /* Try to reuse some or all of the pending_stack_adjust
2800 to get this space. */
2801 needed
2802 = (combine_pending_stack_adjustment_and_call
2803 (unadjusted_args_size,
2804 &adjusted_args_size,
2805 preferred_unit_stack_boundary));
2807 /* combine_pending_stack_adjustment_and_call computes
2808 an adjustment before the arguments are allocated.
2809 Account for them and see whether or not the stack
2810 needs to go up or down. */
2811 needed = unadjusted_args_size - needed;
2813 if (needed < 0)
2815 /* We're releasing stack space. */
2816 /* ??? We can avoid any adjustment at all if we're
2817 already aligned. FIXME. */
2818 pending_stack_adjust = -needed;
2819 do_pending_stack_adjust ();
2820 needed = 0;
2822 else
2823 /* We need to allocate space. We'll do that in
2824 push_block below. */
2825 pending_stack_adjust = 0;
2828 /* Special case this because overhead of `push_block' in
2829 this case is non-trivial. */
2830 if (needed == 0)
2831 argblock = virtual_outgoing_args_rtx;
2832 else
2834 argblock = push_block (GEN_INT (needed), 0, 0);
2835 #ifdef ARGS_GROW_DOWNWARD
2836 argblock = plus_constant (argblock, needed);
2837 #endif
2840 /* We only really need to call `copy_to_reg' in the case
2841 where push insns are going to be used to pass ARGBLOCK
2842 to a function call in ARGS. In that case, the stack
2843 pointer changes value from the allocation point to the
2844 call point, and hence the value of
2845 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2846 as well always do it. */
2847 argblock = copy_to_reg (argblock);
2852 if (ACCUMULATE_OUTGOING_ARGS)
2854 /* The save/restore code in store_one_arg handles all
2855 cases except one: a constructor call (including a C
2856 function returning a BLKmode struct) to initialize
2857 an argument. */
2858 if (stack_arg_under_construction)
2860 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2861 rtx push_size = GEN_INT (reg_parm_stack_space
2862 + adjusted_args_size.constant);
2863 #else
2864 rtx push_size = GEN_INT (adjusted_args_size.constant);
2865 #endif
2866 if (old_stack_level == 0)
2868 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2869 NULL_RTX);
2870 old_stack_pointer_delta = stack_pointer_delta;
2871 old_pending_adj = pending_stack_adjust;
2872 pending_stack_adjust = 0;
2873 /* stack_arg_under_construction says whether a stack
2874 arg is being constructed at the old stack level.
2875 Pushing the stack gets a clean outgoing argument
2876 block. */
2877 old_stack_arg_under_construction
2878 = stack_arg_under_construction;
2879 stack_arg_under_construction = 0;
2880 /* Make a new map for the new argument list. */
2881 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2882 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2883 highest_outgoing_arg_in_use = 0;
2885 allocate_dynamic_stack_space (push_size, NULL_RTX,
2886 BITS_PER_UNIT);
2889 /* If argument evaluation might modify the stack pointer,
2890 copy the address of the argument list to a register. */
2891 for (i = 0; i < num_actuals; i++)
2892 if (args[i].pass_on_stack)
2894 argblock = copy_addr_to_reg (argblock);
2895 break;
2899 compute_argument_addresses (args, argblock, num_actuals);
2901 /* If we push args individually in reverse order, perform stack alignment
2902 before the first push (the last arg). */
2903 if (PUSH_ARGS_REVERSED && argblock == 0
2904 && adjusted_args_size.constant != unadjusted_args_size)
2906 /* When the stack adjustment is pending, we get better code
2907 by combining the adjustments. */
2908 if (pending_stack_adjust
2909 && ! (flags & ECF_LIBCALL_BLOCK)
2910 && ! inhibit_defer_pop)
2912 pending_stack_adjust
2913 = (combine_pending_stack_adjustment_and_call
2914 (unadjusted_args_size,
2915 &adjusted_args_size,
2916 preferred_unit_stack_boundary));
2917 do_pending_stack_adjust ();
2919 else if (argblock == 0)
2920 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2921 - unadjusted_args_size));
2923 /* Now that the stack is properly aligned, pops can't safely
2924 be deferred during the evaluation of the arguments. */
2925 NO_DEFER_POP;
2927 funexp = rtx_for_function_call (fndecl, addr);
2929 /* Figure out the register where the value, if any, will come back. */
2930 valreg = 0;
2931 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2932 && ! structure_value_addr)
2934 if (pcc_struct_value)
2935 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2936 fndecl, (pass == 0));
2937 else
2938 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2941 /* Precompute all register parameters. It isn't safe to compute anything
2942 once we have started filling any specific hard regs. */
2943 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2945 #ifdef REG_PARM_STACK_SPACE
2946 /* Save the fixed argument area if it's part of the caller's frame and
2947 is clobbered by argument setup for this call. */
2948 if (ACCUMULATE_OUTGOING_ARGS && pass)
2949 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2950 &low_to_save, &high_to_save);
2951 #endif
2953 /* Now store (and compute if necessary) all non-register parms.
2954 These come before register parms, since they can require block-moves,
2955 which could clobber the registers used for register parms.
2956 Parms which have partial registers are not stored here,
2957 but we do preallocate space here if they want that. */
2959 for (i = 0; i < num_actuals; i++)
2960 if (args[i].reg == 0 || args[i].pass_on_stack)
2962 rtx before_arg = get_last_insn ();
2964 if (store_one_arg (&args[i], argblock, flags,
2965 adjusted_args_size.var != 0,
2966 reg_parm_stack_space)
2967 || (pass == 0
2968 && check_sibcall_argument_overlap (before_arg,
2969 &args[i], 1)))
2970 sibcall_failure = 1;
2973 /* If we have a parm that is passed in registers but not in memory
2974 and whose alignment does not permit a direct copy into registers,
2975 make a group of pseudos that correspond to each register that we
2976 will later fill. */
2977 if (STRICT_ALIGNMENT)
2978 store_unaligned_arguments_into_pseudos (args, num_actuals);
2980 /* Now store any partially-in-registers parm.
2981 This is the last place a block-move can happen. */
2982 if (reg_parm_seen)
2983 for (i = 0; i < num_actuals; i++)
2984 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2986 rtx before_arg = get_last_insn ();
2988 if (store_one_arg (&args[i], argblock, flags,
2989 adjusted_args_size.var != 0,
2990 reg_parm_stack_space)
2991 || (pass == 0
2992 && check_sibcall_argument_overlap (before_arg,
2993 &args[i], 1)))
2994 sibcall_failure = 1;
2997 /* If we pushed args in forward order, perform stack alignment
2998 after pushing the last arg. */
2999 if (!PUSH_ARGS_REVERSED && argblock == 0)
3000 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3001 - unadjusted_args_size));
3003 /* If register arguments require space on the stack and stack space
3004 was not preallocated, allocate stack space here for arguments
3005 passed in registers. */
3006 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3007 if (!ACCUMULATE_OUTGOING_ARGS
3008 && must_preallocate == 0 && reg_parm_stack_space > 0)
3009 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3010 #endif
3012 /* Pass the function the address in which to return a
3013 structure value. */
3014 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3016 #ifdef POINTERS_EXTEND_UNSIGNED
3017 if (GET_MODE (structure_value_addr) != Pmode)
3018 structure_value_addr = convert_memory_address
3019 (Pmode, structure_value_addr);
3020 #endif
3021 emit_move_insn (struct_value,
3022 force_reg (Pmode,
3023 force_operand (structure_value_addr,
3024 NULL_RTX)));
3026 if (GET_CODE (struct_value) == REG)
3027 use_reg (&call_fusage, struct_value);
3030 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3031 reg_parm_seen, pass == 0);
3033 load_register_parameters (args, num_actuals, &call_fusage, flags,
3034 pass == 0, &sibcall_failure);
3036 /* Perform postincrements before actually calling the function. */
3037 emit_queue ();
3039 /* Save a pointer to the last insn before the call, so that we can
3040 later safely search backwards to find the CALL_INSN. */
3041 before_call = get_last_insn ();
3043 /* Set up next argument register. For sibling calls on machines
3044 with register windows this should be the incoming register. */
3045 #ifdef FUNCTION_INCOMING_ARG
3046 if (pass == 0)
3047 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3048 void_type_node, 1);
3049 else
3050 #endif
3051 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3052 void_type_node, 1);
3054 /* All arguments and registers used for the call must be set up by
3055 now! */
3057 /* Stack must be properly aligned now. */
3058 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3059 abort ();
3061 /* Generate the actual call instruction. */
3062 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3063 adjusted_args_size.constant, struct_value_size,
3064 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3065 flags, & args_so_far);
3067 /* If call is cse'able, make appropriate pair of reg-notes around it.
3068 Test valreg so we don't crash; may safely ignore `const'
3069 if return type is void. Disable for PARALLEL return values, because
3070 we have no way to move such values into a pseudo register. */
3071 if (pass && (flags & ECF_LIBCALL_BLOCK))
3073 rtx insns;
3075 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3077 insns = get_insns ();
3078 end_sequence ();
3079 emit_insn (insns);
3081 else
3083 rtx note = 0;
3084 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3086 /* Mark the return value as a pointer if needed. */
3087 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3088 mark_reg_pointer (temp,
3089 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3091 /* Construct an "equal form" for the value which mentions all the
3092 arguments in order as well as the function name. */
3093 for (i = 0; i < num_actuals; i++)
3094 note = gen_rtx_EXPR_LIST (VOIDmode,
3095 args[i].initial_value, note);
3096 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3098 insns = get_insns ();
3099 end_sequence ();
3101 if (flags & ECF_PURE)
3102 note = gen_rtx_EXPR_LIST (VOIDmode,
3103 gen_rtx_USE (VOIDmode,
3104 gen_rtx_MEM (BLKmode,
3105 gen_rtx_SCRATCH (VOIDmode))),
3106 note);
3108 emit_libcall_block (insns, temp, valreg, note);
3110 valreg = temp;
3113 else if (pass && (flags & ECF_MALLOC))
3115 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3116 rtx last, insns;
3118 /* The return value from a malloc-like function is a pointer. */
3119 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3120 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3122 emit_move_insn (temp, valreg);
3124 /* The return value from a malloc-like function cannot alias
3125 anything else. */
3126 last = get_last_insn ();
3127 REG_NOTES (last) =
3128 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3130 /* Write out the sequence. */
3131 insns = get_insns ();
3132 end_sequence ();
3133 emit_insn (insns);
3134 valreg = temp;
3137 /* For calls to `setjmp', etc., inform flow.c it should complain
3138 if nonvolatile values are live. For functions that cannot return,
3139 inform flow that control does not fall through. */
3141 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3143 /* The barrier must be emitted
3144 immediately after the CALL_INSN. Some ports emit more
3145 than just a CALL_INSN above, so we must search for it here. */
3147 rtx last = get_last_insn ();
3148 while (GET_CODE (last) != CALL_INSN)
3150 last = PREV_INSN (last);
3151 /* There was no CALL_INSN? */
3152 if (last == before_call)
3153 abort ();
3156 emit_barrier_after (last);
3158 /* Stack adjustments after a noreturn call are dead code. */
3159 stack_pointer_delta = old_stack_allocated;
3160 pending_stack_adjust = 0;
3163 if (flags & ECF_LONGJMP)
3164 current_function_calls_longjmp = 1;
3166 /* If value type not void, return an rtx for the value. */
3168 /* If there are cleanups to be called, don't use a hard reg as target.
3169 We need to double check this and see if it matters anymore. */
3170 if (any_pending_cleanups ())
3172 if (target && REG_P (target)
3173 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3174 target = 0;
3175 sibcall_failure = 1;
3178 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3179 || ignore)
3180 target = const0_rtx;
3181 else if (structure_value_addr)
3183 if (target == 0 || GET_CODE (target) != MEM)
3185 target
3186 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3187 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3188 structure_value_addr));
3189 set_mem_attributes (target, exp, 1);
3192 else if (pcc_struct_value)
3194 /* This is the special C++ case where we need to
3195 know what the true target was. We take care to
3196 never use this value more than once in one expression. */
3197 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3198 copy_to_reg (valreg));
3199 set_mem_attributes (target, exp, 1);
3201 /* Handle calls that return values in multiple non-contiguous locations.
3202 The Irix 6 ABI has examples of this. */
3203 else if (GET_CODE (valreg) == PARALLEL)
3205 if (target == 0)
3207 /* This will only be assigned once, so it can be readonly. */
3208 tree nt = build_qualified_type (TREE_TYPE (exp),
3209 (TYPE_QUALS (TREE_TYPE (exp))
3210 | TYPE_QUAL_CONST));
3212 target = assign_temp (nt, 0, 1, 1);
3213 preserve_temp_slots (target);
3216 if (! rtx_equal_p (target, valreg))
3217 emit_group_store (target, valreg, TREE_TYPE (exp),
3218 int_size_in_bytes (TREE_TYPE (exp)));
3220 /* We cannot support sibling calls for this case. */
3221 sibcall_failure = 1;
3223 else if (target
3224 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3225 && GET_MODE (target) == GET_MODE (valreg))
3227 /* TARGET and VALREG cannot be equal at this point because the
3228 latter would not have REG_FUNCTION_VALUE_P true, while the
3229 former would if it were referring to the same register.
3231 If they refer to the same register, this move will be a no-op,
3232 except when function inlining is being done. */
3233 emit_move_insn (target, valreg);
3235 /* If we are setting a MEM, this code must be executed. Since it is
3236 emitted after the call insn, sibcall optimization cannot be
3237 performed in that case. */
3238 if (GET_CODE (target) == MEM)
3239 sibcall_failure = 1;
3241 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3243 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3245 /* We cannot support sibling calls for this case. */
3246 sibcall_failure = 1;
3248 else
3249 target = copy_to_reg (valreg);
3251 if (targetm.calls.promote_function_return(funtype))
3253 /* If we promoted this return value, make the proper SUBREG. TARGET
3254 might be const0_rtx here, so be careful. */
3255 if (GET_CODE (target) == REG
3256 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3257 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3259 tree type = TREE_TYPE (exp);
3260 int unsignedp = TREE_UNSIGNED (type);
3261 int offset = 0;
3263 /* If we don't promote as expected, something is wrong. */
3264 if (GET_MODE (target)
3265 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3266 abort ();
3268 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3269 && GET_MODE_SIZE (GET_MODE (target))
3270 > GET_MODE_SIZE (TYPE_MODE (type)))
3272 offset = GET_MODE_SIZE (GET_MODE (target))
3273 - GET_MODE_SIZE (TYPE_MODE (type));
3274 if (! BYTES_BIG_ENDIAN)
3275 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3276 else if (! WORDS_BIG_ENDIAN)
3277 offset %= UNITS_PER_WORD;
3279 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3280 SUBREG_PROMOTED_VAR_P (target) = 1;
3281 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
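/* A hedged example: if the ABI promotes a `short' return value to
   SImode, VALREG is an SImode register and TARGET becomes
   (subreg:HI (reg:SI ...) OFFSET) with SUBREG_PROMOTED_VAR_P set,
   so later readers know the upper bits are already sign- or
   zero-extended as TREE_UNSIGNED dictates.  */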
3285 /* If size of args is variable or this was a constructor call for a stack
3286 argument, restore saved stack-pointer value. */
3288 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3290 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3291 stack_pointer_delta = old_stack_pointer_delta;
3292 pending_stack_adjust = old_pending_adj;
3293 stack_arg_under_construction = old_stack_arg_under_construction;
3294 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3295 stack_usage_map = initial_stack_usage_map;
3296 sibcall_failure = 1;
3298 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3300 #ifdef REG_PARM_STACK_SPACE
3301 if (save_area)
3302 restore_fixed_argument_area (save_area, argblock,
3303 high_to_save, low_to_save);
3304 #endif
3306 /* If we saved any argument areas, restore them. */
3307 for (i = 0; i < num_actuals; i++)
3308 if (args[i].save_area)
3310 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3311 rtx stack_area
3312 = gen_rtx_MEM (save_mode,
3313 memory_address (save_mode,
3314 XEXP (args[i].stack_slot, 0)));
3316 if (save_mode != BLKmode)
3317 emit_move_insn (stack_area, args[i].save_area);
3318 else
3319 emit_block_move (stack_area, args[i].save_area,
3320 GEN_INT (args[i].locate.size.constant),
3321 BLOCK_OP_CALL_PARM);
3324 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3325 stack_usage_map = initial_stack_usage_map;
3328 /* If this was alloca, record the new stack level for nonlocal gotos.
3329 Check for the handler slots since we might not have a save area
3330 for non-local gotos. */
3332 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3333 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3335 /* Free up storage we no longer need. */
3336 for (i = 0; i < num_actuals; ++i)
3337 if (args[i].aligned_regs)
3338 free (args[i].aligned_regs);
3340 if (pass == 0)
3342 /* Undo the fake expand_start_target_temps we did earlier. If
3343 there had been any cleanups created, we've already set
3344 sibcall_failure. */
3345 expand_end_target_temps ();
3348 /* If this function is returning into a memory location marked as
3349 readonly, it means it is initializing that location. We normally treat
3350 functions as not clobbering such locations, so we need to specify that
3351 this one does. We do this by adding the appropriate CLOBBER to the
3352 CALL_INSN function usage list. This cannot be done by emitting a
3353 standalone CLOBBER after the call because the latter would be ignored
3354 by at least the delay slot scheduling pass. We do this now instead of
3355 adding to call_fusage before the call to emit_call_1 because TARGET
3356 may be modified in the meantime. */
3357 if (structure_value_addr != 0 && target != 0
3358 && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
3359 add_function_usage_to
3360 (last_call_insn (),
3361 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3362 NULL_RTX));
3364 insns = get_insns ();
3365 end_sequence ();
3367 if (pass == 0)
3369 tail_call_insns = insns;
3371 /* Restore the pending stack adjustment now that we have
3372 finished generating the sibling call sequence. */
3374 pending_stack_adjust = save_pending_stack_adjust;
3375 stack_pointer_delta = save_stack_pointer_delta;
3377 /* Prepare arg structure for next iteration. */
3378 for (i = 0; i < num_actuals; i++)
3380 args[i].value = 0;
3381 args[i].aligned_regs = 0;
3382 args[i].stack = 0;
3385 sbitmap_free (stored_args_map);
3387 else
3389 normal_call_insns = insns;
3391 /* Verify that we've deallocated all the stack we used. */
3392 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3393 && old_stack_allocated != stack_pointer_delta
3394 - pending_stack_adjust)
3395 abort ();
3398 /* If something prevents making this a sibling call,
3399 zero out the sequence. */
3400 if (sibcall_failure)
3401 tail_call_insns = NULL_RTX;
3404 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3405 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3406 can happen if the arguments to this function call an inline
3407 function whose expansion contains another CALL_PLACEHOLDER.
3409 If there are any C_Ps in any of these sequences, replace them
3410 with their normal call. */
3412 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3413 if (GET_CODE (insn) == CALL_INSN
3414 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3415 replace_call_placeholder (insn, sibcall_use_normal);
3417 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3418 if (GET_CODE (insn) == CALL_INSN
3419 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3420 replace_call_placeholder (insn, sibcall_use_normal);
3422 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3423 if (GET_CODE (insn) == CALL_INSN
3424 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3425 replace_call_placeholder (insn, sibcall_use_normal);
3427 /* If this was a potential tail recursion site, then emit a
3428 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3429 One of them will be selected later. */
3430 if (tail_recursion_insns || tail_call_insns)
3432 /* The tail recursion label must be kept around. We could expose
3433 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3434 and makes determining true tail recursion sites difficult.
3436 So we set LABEL_PRESERVE_P here, then clear it when we select
3437 one of the call sequences after rtl generation is complete. */
3438 if (tail_recursion_insns)
3439 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3440 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3441 tail_call_insns,
3442 tail_recursion_insns,
3443 tail_recursion_label));
3445 else
3446 emit_insn (normal_call_insns);
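/* A minimal sketch (not part of the original file) of how a later pass
   can pick apart the CALL_PLACEHOLDER emitted above, following the
   operand order of the gen_rtx_CALL_PLACEHOLDER call: operand 0 is the
   normal call sequence, 1 the sibling call sequence, 2 the tail
   recursion sequence, and 3 the tail recursion label.  The helper name
   is hypothetical.  */
#if 0
static rtx
placeholder_sequence (rtx placeholder, int use_sibcall)
{
  /* Pick the insn stream that would be spliced in place of the
     placeholder.  */
  return use_sibcall ? XEXP (placeholder, 1) : XEXP (placeholder, 0);
}
#endif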
3448 currently_expanding_call--;
3450 /* If this function returns with the stack pointer depressed, ensure
3451 this block saves and restores the stack pointer, show it was
3452 changed, and adjust for any outgoing arg space. */
3453 if (flags & ECF_SP_DEPRESSED)
3455 clear_pending_stack_adjust ();
3456 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3457 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3458 save_stack_pointer ();
3461 return target;
3464 /* Traverse an argument list in VALUES and expand all complex
3465 arguments into their components. */
3466 tree
3467 split_complex_values (tree values)
3469 tree p;
3471 values = copy_list (values);
3473 for (p = values; p; p = TREE_CHAIN (p))
3475 tree complex_value = TREE_VALUE (p);
3476 tree complex_type;
3478 complex_type = TREE_TYPE (complex_value);
3479 if (!complex_type)
3480 continue;
3482 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3484 tree subtype;
3485 tree real, imag, next;
3487 subtype = TREE_TYPE (complex_type);
3488 complex_value = save_expr (complex_value);
3489 real = build1 (REALPART_EXPR, subtype, complex_value);
3490 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3492 TREE_VALUE (p) = real;
3493 next = TREE_CHAIN (p);
3494 imag = build_tree_list (NULL_TREE, imag);
3495 TREE_CHAIN (p) = imag;
3496 TREE_CHAIN (imag) = next;
3498 /* Skip the newly created node. */
3499 p = TREE_CHAIN (p);
3503 return values;
3506 /* Traverse a list of TYPES and expand all complex types into their
3507 components. */
3508 tree
3509 split_complex_types (tree types)
3511 tree p;
3513 types = copy_list (types);
3515 for (p = types; p; p = TREE_CHAIN (p))
3517 tree complex_type = TREE_VALUE (p);
3519 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3521 tree next, imag;
3523 /* Rewrite complex type with component type. */
3524 TREE_VALUE (p) = TREE_TYPE (complex_type);
3525 next = TREE_CHAIN (p);
3527 /* Add another component type for the imaginary part. */
3528 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3529 TREE_CHAIN (p) = imag;
3530 TREE_CHAIN (imag) = next;
3532 /* Skip the newly created node. */
3533 p = TREE_CHAIN (p);
3537 return types;
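/* An illustrative sketch (hypothetical caller, not from this file) of
   the two routines above: a one-element argument list holding a
   COMPLEX_TYPE value, and the matching type list, each become
   two-element lists of the component type.  */
#if 0
  tree args  = tree_cons (NULL_TREE, complex_arg, NULL_TREE);
  tree types = tree_cons (NULL_TREE, complex_double_type_node, NULL_TREE);

  args  = split_complex_values (args);   /* (realpart, imagpart) */
  types = split_complex_types (types);   /* (double, double) */
#endif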
3540 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3541 The RETVAL parameter specifies whether the return value needs to be saved; the other
3542 parameters are documented in the emit_library_call function below. */
3544 static rtx
3545 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3546 enum libcall_type fn_type,
3547 enum machine_mode outmode, int nargs, va_list p)
3549 /* Total size in bytes of all the stack-parms scanned so far. */
3550 struct args_size args_size;
3551 /* Size of arguments before any adjustments (such as rounding). */
3552 struct args_size original_args_size;
3553 int argnum;
3554 rtx fun;
3555 int inc;
3556 int count;
3557 rtx argblock = 0;
3558 CUMULATIVE_ARGS args_so_far;
3559 struct arg
3561 rtx value;
3562 enum machine_mode mode;
3563 rtx reg;
3564 int partial;
3565 struct locate_and_pad_arg_data locate;
3566 rtx save_area;
3568 struct arg *argvec;
3569 int old_inhibit_defer_pop = inhibit_defer_pop;
3570 rtx call_fusage = 0;
3571 rtx mem_value = 0;
3572 rtx valreg;
3573 int pcc_struct_value = 0;
3574 int struct_value_size = 0;
3575 int flags;
3576 int reg_parm_stack_space = 0;
3577 int needed;
3578 rtx before_call;
3579 tree tfom; /* type_for_mode (outmode, 0) */
3581 #ifdef REG_PARM_STACK_SPACE
3582 /* Define the boundary of the register parm stack space that needs to be
3583 saved, if any. */
3584 int low_to_save, high_to_save;
3585 rtx save_area = 0; /* Place that it is saved. */
3586 #endif
3588 /* Size of the stack reserved for parameter registers. */
3589 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3590 char *initial_stack_usage_map = stack_usage_map;
3592 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3594 #ifdef REG_PARM_STACK_SPACE
3595 #ifdef MAYBE_REG_PARM_STACK_SPACE
3596 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3597 #else
3598 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3599 #endif
3600 #endif
3602 /* By default, library functions cannot throw. */
3603 flags = ECF_NOTHROW;
3605 switch (fn_type)
3607 case LCT_NORMAL:
3608 break;
3609 case LCT_CONST:
3610 flags |= ECF_CONST;
3611 break;
3612 case LCT_PURE:
3613 flags |= ECF_PURE;
3614 break;
3615 case LCT_CONST_MAKE_BLOCK:
3616 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3617 break;
3618 case LCT_PURE_MAKE_BLOCK:
3619 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3620 break;
3621 case LCT_NORETURN:
3622 flags |= ECF_NORETURN;
3623 break;
3624 case LCT_THROW:
3625 flags = ECF_NORETURN;
3626 break;
3627 case LCT_ALWAYS_RETURN:
3628 flags = ECF_ALWAYS_RETURN;
3629 break;
3630 case LCT_RETURNS_TWICE:
3631 flags = ECF_RETURNS_TWICE;
3632 break;
3634 fun = orgfun;
3636 /* Ensure current function's preferred stack boundary is at least
3637 what we need. */
3638 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3639 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3641 /* If this kind of value comes back in memory,
3642 decide where in memory it should come back. */
3643 if (outmode != VOIDmode)
3645 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3646 if (aggregate_value_p (tfom, 0))
3648 #ifdef PCC_STATIC_STRUCT_RETURN
3649 rtx pointer_reg
3650 = hard_function_value (build_pointer_type (tfom), 0, 0);
3651 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3652 pcc_struct_value = 1;
3653 if (value == 0)
3654 value = gen_reg_rtx (outmode);
3655 #else /* not PCC_STATIC_STRUCT_RETURN */
3656 struct_value_size = GET_MODE_SIZE (outmode);
3657 if (value != 0 && GET_CODE (value) == MEM)
3658 mem_value = value;
3659 else
3660 mem_value = assign_temp (tfom, 0, 1, 1);
3661 #endif
3662 /* This call returns a big structure. */
3663 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3666 else
3667 tfom = void_type_node;
3669 /* ??? Unfinished: must pass the memory address as an argument. */
3671 /* Copy all the libcall-arguments out of the varargs data
3672 and into a vector ARGVEC.
3674 Compute how to pass each argument. We only support a very small subset
3675 of the full argument passing conventions to limit complexity here since
3676 library functions shouldn't have many args. */
3678 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3679 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3681 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3682 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3683 #else
3684 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3685 #endif
3687 args_size.constant = 0;
3688 args_size.var = 0;
3690 count = 0;
3692 /* Now we are about to start emitting insns that can be deleted
3693 if a libcall is deleted. */
3694 if (flags & ECF_LIBCALL_BLOCK)
3695 start_sequence ();
3697 push_temp_slots ();
3699 /* If there's a structure value address to be passed,
3700 either pass it in the special place, or pass it as an extra argument. */
3701 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3703 rtx addr = XEXP (mem_value, 0);
3704 nargs++;
3706 /* Make sure it is a reasonable operand for a move or push insn. */
3707 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3708 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3709 addr = force_operand (addr, NULL_RTX);
3711 argvec[count].value = addr;
3712 argvec[count].mode = Pmode;
3713 argvec[count].partial = 0;
3715 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3716 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3717 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3718 abort ();
3719 #endif
3721 locate_and_pad_parm (Pmode, NULL_TREE,
3722 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3723 1,
3724 #else
3725 argvec[count].reg != 0,
3726 #endif
3727 0, NULL_TREE, &args_size, &argvec[count].locate);
3729 if (argvec[count].reg == 0 || argvec[count].partial != 0
3730 || reg_parm_stack_space > 0)
3731 args_size.constant += argvec[count].locate.size.constant;
3733 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3735 count++;
3738 for (; count < nargs; count++)
3740 rtx val = va_arg (p, rtx);
3741 enum machine_mode mode = va_arg (p, enum machine_mode);
3743 /* We cannot convert the arg value to the mode the library wants here;
3744 must do it earlier where we know the signedness of the arg. */
3745 if (mode == BLKmode
3746 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3747 abort ();
3749 /* There's no need to call protect_from_queue, because
3750 either emit_move_insn or emit_push_insn will do that. */
3752 /* Make sure it is a reasonable operand for a move or push insn. */
3753 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3754 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3755 val = force_operand (val, NULL_RTX);
3757 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3758 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3760 rtx slot;
3761 int must_copy = 1
3762 #ifdef FUNCTION_ARG_CALLEE_COPIES
3763 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3764 NULL_TREE, 1)
3765 #endif
3766 ;
3768 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3769 functions, so we have to pretend this isn't such a function. */
3770 if (flags & ECF_LIBCALL_BLOCK)
3772 rtx insns = get_insns ();
3773 end_sequence ();
3774 emit_insn (insns);
3776 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3778 /* If this was a CONST function, it is now PURE since
3779 it now reads memory. */
3780 if (flags & ECF_CONST)
3782 flags &= ~ECF_CONST;
3783 flags |= ECF_PURE;
3786 if (GET_CODE (val) == MEM && ! must_copy)
3787 slot = val;
3788 else if (must_copy)
3790 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3791 0, 1, 1);
3792 emit_move_insn (slot, val);
3794 else
3796 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3798 slot
3799 = gen_rtx_MEM (mode,
3800 expand_expr (build1 (ADDR_EXPR,
3801 build_pointer_type (type),
3802 make_tree (type, val)),
3803 NULL_RTX, VOIDmode, 0));
3806 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3807 gen_rtx_USE (VOIDmode, slot),
3808 call_fusage);
3809 if (must_copy)
3810 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3811 gen_rtx_CLOBBER (VOIDmode,
3812 slot),
3813 call_fusage);
3815 mode = Pmode;
3816 val = force_operand (XEXP (slot, 0), NULL_RTX);
3818 #endif
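/* At this point, when the by-reference path above was taken, VAL holds
   the address of a slot containing the argument's value and MODE has
   been rewritten to Pmode, so the code below passes a pointer exactly
   like any other word-sized argument.  */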
3820 argvec[count].value = val;
3821 argvec[count].mode = mode;
3823 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3825 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3826 argvec[count].partial
3827 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3828 #else
3829 argvec[count].partial = 0;
3830 #endif
3832 locate_and_pad_parm (mode, NULL_TREE,
3833 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3834 1,
3835 #else
3836 argvec[count].reg != 0,
3837 #endif
3838 argvec[count].partial,
3839 NULL_TREE, &args_size, &argvec[count].locate);
3841 if (argvec[count].locate.size.var)
3842 abort ();
3844 if (argvec[count].reg == 0 || argvec[count].partial != 0
3845 || reg_parm_stack_space > 0)
3846 args_size.constant += argvec[count].locate.size.constant;
3848 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3851 #ifdef FINAL_REG_PARM_STACK_SPACE
3852 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3853 args_size.var);
3854 #endif
3855 /* If this machine requires an external definition for library
3856 functions, write one out. */
3857 assemble_external_libcall (fun);
3859 original_args_size = args_size;
3860 args_size.constant = (((args_size.constant
3861 + stack_pointer_delta
3862 + STACK_BYTES - 1)
3863 / STACK_BYTES
3864 * STACK_BYTES)
3865 - stack_pointer_delta);
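/* Worked example of the rounding above, assuming STACK_BYTES == 16:
   with args_size.constant == 20 and stack_pointer_delta == 8, the sum
   28 rounds up to 32, and subtracting the delta leaves 24, so the
   eventual total of 24 + 8 outstanding bytes stays 16-byte aligned.  */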
3867 args_size.constant = MAX (args_size.constant,
3868 reg_parm_stack_space);
3870 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3871 args_size.constant -= reg_parm_stack_space;
3872 #endif
3874 if (args_size.constant > current_function_outgoing_args_size)
3875 current_function_outgoing_args_size = args_size.constant;
3877 if (ACCUMULATE_OUTGOING_ARGS)
3879 /* Since the stack pointer will never be pushed, it is possible for
3880 the evaluation of a parm to clobber something we have already
3881 written to the stack. Since most function calls on RISC machines
3882 do not use the stack, this is uncommon, but must work correctly.
3884 Therefore, we save any area of the stack that was already written
3885 and that we are using. Here we set up to do this by making a new
3886 stack usage map from the old one.
3888 Another approach might be to try to reorder the argument
3889 evaluations to avoid this conflicting stack usage. */
3891 needed = args_size.constant;
3893 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3894 /* Since we will be writing into the entire argument area, the
3895 map must be allocated for its entire size, not just the part that
3896 is the responsibility of the caller. */
3897 needed += reg_parm_stack_space;
3898 #endif
3900 #ifdef ARGS_GROW_DOWNWARD
3901 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3902 needed + 1);
3903 #else
3904 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3905 needed);
3906 #endif
3907 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3909 if (initial_highest_arg_in_use)
3910 memcpy (stack_usage_map, initial_stack_usage_map,
3911 initial_highest_arg_in_use);
3913 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3914 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3915 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3916 needed = 0;
3918 /* We must be careful to use virtual regs before they're instantiated,
3919 and real regs afterwards. Loop optimization, for example, can create
3920 new libcalls after we've instantiated the virtual regs, and if we
3921 use virtuals anyway, they won't match the rtl patterns. */
3923 if (virtuals_instantiated)
3924 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3925 else
3926 argblock = virtual_outgoing_args_rtx;
3928 else
3930 if (!PUSH_ARGS)
3931 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3934 /* If we push args individually in reverse order, perform stack alignment
3935 before the first push (the last arg). */
3936 if (argblock == 0 && PUSH_ARGS_REVERSED)
3937 anti_adjust_stack (GEN_INT (args_size.constant
3938 - original_args_size.constant));
3940 if (PUSH_ARGS_REVERSED)
3942 inc = -1;
3943 argnum = nargs - 1;
3945 else
3947 inc = 1;
3948 argnum = 0;
3951 #ifdef REG_PARM_STACK_SPACE
3952 if (ACCUMULATE_OUTGOING_ARGS)
3954 /* The argument list is the property of the called routine and it
3955 may clobber it. If the fixed area has been used for previous
3956 parameters, we must save and restore it. */
3957 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3958 &low_to_save, &high_to_save);
3960 #endif
3962 /* Push the args that need to be pushed. */
3964 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3965 are to be pushed. */
3966 for (count = 0; count < nargs; count++, argnum += inc)
3968 enum machine_mode mode = argvec[argnum].mode;
3969 rtx val = argvec[argnum].value;
3970 rtx reg = argvec[argnum].reg;
3971 int partial = argvec[argnum].partial;
3972 int lower_bound = 0, upper_bound = 0, i;
3974 if (! (reg != 0 && partial == 0))
3976 if (ACCUMULATE_OUTGOING_ARGS)
3978 /* If this is being stored into a pre-allocated, fixed-size,
3979 stack area, save any previous data at that location. */
3981 #ifdef ARGS_GROW_DOWNWARD
3982 /* stack_slot is negative, but we want to index stack_usage_map
3983 with positive values. */
3984 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3985 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3986 #else
3987 lower_bound = argvec[argnum].locate.offset.constant;
3988 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3989 #endif
3991 i = lower_bound;
3992 /* Don't worry about things in the fixed argument area;
3993 it has already been saved. */
3994 if (i < reg_parm_stack_space)
3995 i = reg_parm_stack_space;
3996 while (i < upper_bound && stack_usage_map[i] == 0)
3997 i++;
3999 if (i < upper_bound)
4001 /* We need to make a save area. */
4002 unsigned int size
4003 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4004 enum machine_mode save_mode
4005 = mode_for_size (size, MODE_INT, 1);
4006 rtx adr
4007 = plus_constant (argblock,
4008 argvec[argnum].locate.offset.constant);
4009 rtx stack_area
4010 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4011 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4013 emit_move_insn (argvec[argnum].save_area, stack_area);
4017 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4018 partial, reg, 0, argblock,
4019 GEN_INT (argvec[argnum].locate.offset.constant),
4020 reg_parm_stack_space,
4021 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4023 /* Now mark the segment we just used. */
4024 if (ACCUMULATE_OUTGOING_ARGS)
4025 for (i = lower_bound; i < upper_bound; i++)
4026 stack_usage_map[i] = 1;
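/* Illustration with hypothetical bounds: if lower_bound == 8 and
   upper_bound == 12, bytes 8..11 of the outgoing argument area are now
   marked live in stack_usage_map, so a later argument overlapping them
   takes the save-area path above instead of silently clobbering.  */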
4028 NO_DEFER_POP;
4032 /* If we pushed args in forward order, perform stack alignment
4033 after pushing the last arg. */
4034 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4035 anti_adjust_stack (GEN_INT (args_size.constant
4036 - original_args_size.constant));
4038 if (PUSH_ARGS_REVERSED)
4039 argnum = nargs - 1;
4040 else
4041 argnum = 0;
4043 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4045 /* Now load any reg parms into their regs. */
4047 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4048 are to be pushed. */
4049 for (count = 0; count < nargs; count++, argnum += inc)
4051 rtx val = argvec[argnum].value;
4052 rtx reg = argvec[argnum].reg;
4053 int partial = argvec[argnum].partial;
4055 /* Handle calls that pass values in multiple non-contiguous
4056 locations. The PA64 has examples of this for library calls. */
4057 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4058 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
4059 else if (reg != 0 && partial == 0)
4060 emit_move_insn (reg, val);
4062 NO_DEFER_POP;
4065 /* Any regs containing parms remain in use through the call. */
4066 for (count = 0; count < nargs; count++)
4068 rtx reg = argvec[count].reg;
4069 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4070 use_group_regs (&call_fusage, reg);
4071 else if (reg != 0)
4072 use_reg (&call_fusage, reg);
4075 /* Pass the function the address in which to return a structure value. */
4076 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4078 emit_move_insn (struct_value,
4079 force_reg (Pmode,
4080 force_operand (XEXP (mem_value, 0),
4081 NULL_RTX)));
4082 if (GET_CODE (struct_value) == REG)
4083 use_reg (&call_fusage, struct_value);
4086 /* Don't allow popping to be deferred, since then
4087 cse'ing of library calls could delete a call and leave the pop. */
4088 NO_DEFER_POP;
4089 valreg = (mem_value == 0 && outmode != VOIDmode
4090 ? hard_libcall_value (outmode) : NULL_RTX);
4092 /* Stack must be properly aligned now. */
4093 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4094 abort ();
4096 before_call = get_last_insn ();
4098 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4099 will set inhibit_defer_pop to that value. */
4100 /* The return type is needed to decide how many bytes the function pops.
4101 Signedness plays no role in that, so for simplicity, we pretend it's
4102 always signed. We also assume that the list of arguments passed has
4103 no impact, so we pretend it is unknown. */
4105 emit_call_1 (fun,
4106 get_identifier (XSTR (orgfun, 0)),
4107 build_function_type (tfom, NULL_TREE),
4108 original_args_size.constant, args_size.constant,
4109 struct_value_size,
4110 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4111 valreg,
4112 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4114 /* For calls to `setjmp', etc., inform flow.c it should complain
4115 if nonvolatile values are live. For functions that cannot return,
4116 inform flow that control does not fall through. */
4118 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4120 /* The barrier note must be emitted
4121 immediately after the CALL_INSN. Some ports emit more than
4122 just a CALL_INSN above, so we must search for it here. */
4124 rtx last = get_last_insn ();
4125 while (GET_CODE (last) != CALL_INSN)
4127 last = PREV_INSN (last);
4128 /* There was no CALL_INSN? */
4129 if (last == before_call)
4130 abort ();
4133 emit_barrier_after (last);
4136 /* Now restore inhibit_defer_pop to its actual original value. */
4137 OK_DEFER_POP;
4139 /* If call is cse'able, make appropriate pair of reg-notes around it.
4140 Test valreg so we don't crash; may safely ignore `const'
4141 if return type is void. Disable for PARALLEL return values, because
4142 we have no way to move such values into a pseudo register. */
4143 if (flags & ECF_LIBCALL_BLOCK)
4145 rtx insns;
4147 if (valreg == 0)
4149 insns = get_insns ();
4150 end_sequence ();
4151 emit_insn (insns);
4153 else
4155 rtx note = 0;
4156 rtx temp;
4157 int i;
4159 if (GET_CODE (valreg) == PARALLEL)
4161 temp = gen_reg_rtx (outmode);
4162 emit_group_store (temp, valreg, NULL_TREE,
4163 GET_MODE_SIZE (outmode));
4164 valreg = temp;
4167 temp = gen_reg_rtx (GET_MODE (valreg));
4169 /* Construct an "equal form" for the value which mentions all the
4170 arguments in order as well as the function name. */
4171 for (i = 0; i < nargs; i++)
4172 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4173 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4175 insns = get_insns ();
4176 end_sequence ();
4178 if (flags & ECF_PURE)
4179 note = gen_rtx_EXPR_LIST (VOIDmode,
4180 gen_rtx_USE (VOIDmode,
4181 gen_rtx_MEM (BLKmode,
4182 gen_rtx_SCRATCH (VOIDmode))),
4183 note);
4185 emit_libcall_block (insns, temp, valreg, note);
4187 valreg = temp;
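/* The note built above mentions FUN together with every argument
   value, so CSE can recognize two libcalls as computing the same
   thing; for example, two back-to-back calls of the same division
   libcall with identical operands can be reduced to one, reusing
   TEMP.  */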
4190 pop_temp_slots ();
4192 /* Copy the value to the right place. */
4193 if (outmode != VOIDmode && retval)
4195 if (mem_value)
4197 if (value == 0)
4198 value = mem_value;
4199 if (value != mem_value)
4200 emit_move_insn (value, mem_value);
4202 else if (GET_CODE (valreg) == PARALLEL)
4204 if (value == 0)
4205 value = gen_reg_rtx (outmode);
4206 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4208 else if (value != 0)
4209 emit_move_insn (value, valreg);
4210 else
4211 value = valreg;
4214 if (ACCUMULATE_OUTGOING_ARGS)
4216 #ifdef REG_PARM_STACK_SPACE
4217 if (save_area)
4218 restore_fixed_argument_area (save_area, argblock,
4219 high_to_save, low_to_save);
4220 #endif
4222 /* If we saved any argument areas, restore them. */
4223 for (count = 0; count < nargs; count++)
4224 if (argvec[count].save_area)
4226 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4227 rtx adr = plus_constant (argblock,
4228 argvec[count].locate.offset.constant);
4229 rtx stack_area = gen_rtx_MEM (save_mode,
4230 memory_address (save_mode, adr));
4232 emit_move_insn (stack_area, argvec[count].save_area);
4235 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4236 stack_usage_map = initial_stack_usage_map;
4239 return value;
4243 /* Output a library call to function FUN (a SYMBOL_REF rtx),
4245 for a value of mode OUTMODE,
4246 with NARGS different arguments, passed as alternating rtx values
4247 and machine_modes to convert them to.
4248 The rtx values should have been passed through protect_from_queue already.
4250 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4251 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4252 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4253 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4254 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4255 or other LCT_ value for other types of library calls. */
4257 void
4258 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4259 enum machine_mode outmode, int nargs, ...)
4261 va_list p;
4263 va_start (p, nargs);
4264 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4265 va_end (p);
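/* A minimal usage sketch (hypothetical operands, not from this file):
   arguments are passed as alternating rtx / machine_mode pairs, as
   documented above.  */
#if 0
  emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                     dest_addr, Pmode,
                     const0_rtx, TYPE_MODE (integer_type_node),
                     size_rtx, TYPE_MODE (sizetype));
#endif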
4268 /* Like emit_library_call except that an extra argument, VALUE,
4269 comes second and says where to store the result.
4270 (If VALUE is zero, this function chooses a convenient way
4271 to return the value.)
4273 This function returns an rtx for where the value is to be found.
4274 If VALUE is nonzero, VALUE is returned. */
4276 rtx
4277 emit_library_call_value (rtx orgfun, rtx value,
4278 enum libcall_type fn_type,
4279 enum machine_mode outmode, int nargs, ...)
4281 rtx result;
4282 va_list p;
4284 va_start (p, nargs);
4285 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4286 nargs, p);
4287 va_end (p);
4289 return result;
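/* Corresponding sketch for the value-returning variant (hypothetical
   operands): the rtx holding the result is returned, and is VALUE
   itself when VALUE was nonzero.  */
#if 0
  rtx cmp = emit_library_call_value (memcmp_libfunc, NULL_RTX, LCT_PURE,
                                     TYPE_MODE (integer_type_node), 3,
                                     addr1, Pmode, addr2, Pmode,
                                     len_rtx, TYPE_MODE (sizetype));
#endif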
4292 /* Store a single argument for a function call
4293 into the register or memory area where it must be passed.
4294 *ARG describes the argument value and where to pass it.
4296 ARGBLOCK is the address of the stack-block for all the arguments,
4297 or 0 on a machine where arguments are pushed individually.
4299 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4300 so must be careful about how the stack is used.
4302 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4303 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is nonzero, to indicate
4304 that we need not worry about saving and restoring the stack.
4306 FNDECL is the declaration of the function we are calling.
4308 Return nonzero if this arg should cause sibcall failure,
4309 zero otherwise. */
4311 static int
4312 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4313 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4315 tree pval = arg->tree_value;
4316 rtx reg = 0;
4317 int partial = 0;
4318 int used = 0;
4319 int i, lower_bound = 0, upper_bound = 0;
4320 int sibcall_failure = 0;
4322 if (TREE_CODE (pval) == ERROR_MARK)
4323 return 1;
4325 /* Push a new temporary level for any temporaries we make for
4326 this argument. */
4327 push_temp_slots ();
4329 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4331 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4332 save any previous data at that location. */
4333 if (argblock && ! variable_size && arg->stack)
4335 #ifdef ARGS_GROW_DOWNWARD
4336 /* stack_slot is negative, but we want to index stack_usage_map
4337 with positive values. */
4338 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4339 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4340 else
4341 upper_bound = 0;
4343 lower_bound = upper_bound - arg->locate.size.constant;
4344 #else
4345 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4346 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4347 else
4348 lower_bound = 0;
4350 upper_bound = lower_bound + arg->locate.size.constant;
4351 #endif
4353 i = lower_bound;
4354 /* Don't worry about things in the fixed argument area;
4355 it has already been saved. */
4356 if (i < reg_parm_stack_space)
4357 i = reg_parm_stack_space;
4358 while (i < upper_bound && stack_usage_map[i] == 0)
4359 i++;
4361 if (i < upper_bound)
4363 /* We need to make a save area. */
4364 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4365 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4366 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4367 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4369 if (save_mode == BLKmode)
4371 tree ot = TREE_TYPE (arg->tree_value);
4372 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4373 | TYPE_QUAL_CONST));
4375 arg->save_area = assign_temp (nt, 0, 1, 1);
4376 preserve_temp_slots (arg->save_area);
4377 emit_block_move (validize_mem (arg->save_area), stack_area,
4378 expr_size (arg->tree_value),
4379 BLOCK_OP_CALL_PARM);
4381 else
4383 arg->save_area = gen_reg_rtx (save_mode);
4384 emit_move_insn (arg->save_area, stack_area);
4390 /* If this isn't going to be placed on both the stack and in registers,
4391 set up the register and number of words. */
4392 if (! arg->pass_on_stack)
4394 if (flags & ECF_SIBCALL)
4395 reg = arg->tail_call_reg;
4396 else
4397 reg = arg->reg;
4398 partial = arg->partial;
4401 if (reg != 0 && partial == 0)
4402 /* Being passed entirely in a register. We shouldn't be called in
4403 this case. */
4404 abort ();
4406 /* If this arg needs special alignment, don't load the registers
4407 here. */
4408 if (arg->n_aligned_regs != 0)
4409 reg = 0;
4411 /* If this is being passed partially in a register, we can't evaluate
4412 it directly into its stack slot. Otherwise, we can. */
4413 if (arg->value == 0)
4415 /* stack_arg_under_construction is nonzero if a function argument is
4416 being evaluated directly into the outgoing argument list and
4417 expand_call must take special action to preserve the argument list
4418 if it is called recursively.
4420 For scalar function arguments stack_usage_map is sufficient to
4421 determine which stack slots must be saved and restored. Scalar
4422 arguments in general have pass_on_stack == 0.
4424 If this argument is initialized by a function which takes the
4425 address of the argument (a C++ constructor or a C function
4426 returning a BLKmode structure), then stack_usage_map is
4427 insufficient and expand_call must push the stack around the
4428 function call. Such arguments have pass_on_stack == 1.
4430 Note that it is always safe to set stack_arg_under_construction,
4431 but this generates suboptimal code if set when not needed. */
4433 if (arg->pass_on_stack)
4434 stack_arg_under_construction++;
4436 arg->value = expand_expr (pval,
4437 (partial
4438 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4439 ? NULL_RTX : arg->stack,
4440 VOIDmode, EXPAND_STACK_PARM);
4442 /* If we are promoting object (or for any other reason) the mode
4443 doesn't agree, convert the mode. */
4445 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4446 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4447 arg->value, arg->unsignedp);
4449 if (arg->pass_on_stack)
4450 stack_arg_under_construction--;
4453 /* Don't allow anything left on stack from computation
4454 of argument to alloca. */
4455 if (flags & ECF_MAY_BE_ALLOCA)
4456 do_pending_stack_adjust ();
4458 if (arg->value == arg->stack)
4459 /* If the value is already in the stack slot, we are done. */
4461 else if (arg->mode != BLKmode)
4463 int size;
4465 /* Argument is a scalar, not entirely passed in registers.
4466 (If part is passed in registers, arg->partial says how much
4467 and emit_push_insn will take care of putting it there.)
4469 Push it, and if its size is less than the
4470 amount of space allocated to it,
4471 also bump stack pointer by the additional space.
4472 Note that in C the default argument promotions
4473 will prevent such mismatches. */
4475 size = GET_MODE_SIZE (arg->mode);
4476 /* Compute how much space the push instruction will push.
4477 On many machines, pushing a byte will advance the stack
4478 pointer by a halfword. */
4479 #ifdef PUSH_ROUNDING
4480 size = PUSH_ROUNDING (size);
4481 #endif
4482 used = size;
4484 /* Compute how much space the argument should get:
4485 round up to a multiple of the alignment for arguments. */
4486 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4487 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4488 / (PARM_BOUNDARY / BITS_PER_UNIT))
4489 * (PARM_BOUNDARY / BITS_PER_UNIT));
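/* Example of the rounding above, assuming PARM_BOUNDARY == 64: a
   1-byte argument yields used == 8, and used - size == 7 is passed to
   emit_push_insn below as the extra space to allocate past the data.  */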
4491 /* This isn't already where we want it on the stack, so put it there.
4492 This can either be done with push or copy insns. */
4493 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4494 PARM_BOUNDARY, partial, reg, used - size, argblock,
4495 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4496 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4498 /* Unless this is a partially-in-register argument, the argument is now
4499 in the stack. */
4500 if (partial == 0)
4501 arg->value = arg->stack;
4503 else
4505 /* BLKmode, at least partly to be pushed. */
4507 unsigned int parm_align;
4508 int excess;
4509 rtx size_rtx;
4511 /* Pushing a nonscalar.
4512 If part is passed in registers, PARTIAL says how much
4513 and emit_push_insn will take care of putting it there. */
4515 /* Round its size up to a multiple
4516 of the allocation unit for arguments. */
4518 if (arg->locate.size.var != 0)
4520 excess = 0;
4521 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4523 else
4525 /* PUSH_ROUNDING has no effect on us, because
4526 emit_push_insn for BLKmode is careful to avoid it. */
4527 excess = (arg->locate.size.constant
4528 - int_size_in_bytes (TREE_TYPE (pval))
4529 + partial * UNITS_PER_WORD);
4530 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4531 NULL_RTX, TYPE_MODE (sizetype), 0);
4534 /* Some types will require stricter alignment, which will be
4535 provided for elsewhere in argument layout. */
4536 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4538 /* When an argument is padded down, the block is aligned to
4539 PARM_BOUNDARY, but the actual argument isn't. */
4540 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4542 if (arg->locate.size.var)
4543 parm_align = BITS_PER_UNIT;
4544 else if (excess)
4546 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4547 parm_align = MIN (parm_align, excess_align);
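/* (excess & -excess) isolates the lowest set bit of EXCESS: for
   excess == 12 (binary 1100) it yields 4, so excess_align becomes 32
   bits and the padded-down block may only be assumed 32-bit aligned.  */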
4551 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4553 /* emit_push_insn might not work properly if arg->value and
4554 argblock + arg->locate.offset areas overlap. */
4555 rtx x = arg->value;
4556 int i = 0;
4558 if (XEXP (x, 0) == current_function_internal_arg_pointer
4559 || (GET_CODE (XEXP (x, 0)) == PLUS
4560 && XEXP (XEXP (x, 0), 0) ==
4561 current_function_internal_arg_pointer
4562 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4564 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4565 i = INTVAL (XEXP (XEXP (x, 0), 1));
4567 /* expand_call should ensure this */
4568 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4569 abort ();
4571 if (arg->locate.offset.constant > i)
4573 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4574 sibcall_failure = 1;
4576 else if (arg->locate.offset.constant < i)
4578 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4579 sibcall_failure = 1;
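/* Overlap illustration with hypothetical constants: if the argument
   value lives at bytes [i, i + size) of the incoming argument area and
   its outgoing slot starts at offset constant within that same area,
   any intersection of the two ranges forces sibcall_failure, because
   emit_push_insn could read from memory it has already overwritten.  */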
4584 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4585 parm_align, partial, reg, excess, argblock,
4586 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4587 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4589 /* Unless this is a partially-in-register argument, the argument is now
4590 in the stack.
4592 ??? Unlike the case above, in which we want the actual
4593 address of the data, so that we can load it directly into a
4594 register, here we want the address of the stack slot, so that
4595 it's properly aligned for word-by-word copying or something
4596 like that. It's not clear that this is always correct. */
4597 if (partial == 0)
4598 arg->value = arg->stack_slot;
4601 /* Mark all slots this store used. */
4602 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4603 && argblock && ! variable_size && arg->stack)
4604 for (i = lower_bound; i < upper_bound; i++)
4605 stack_usage_map[i] = 1;
4607 /* Once we have pushed something, pops can't safely
4608 be deferred during the rest of the arguments. */
4609 NO_DEFER_POP;
4611 /* ANSI doesn't require a sequence point here,
4612 but PCC has one, so this will avoid some problems. */
4613 emit_queue ();
4615 /* Free any temporary slots made in processing this argument. Show
4616 that we might have taken the address of something and pushed that
4617 as an operand. */
4618 preserve_temp_slots (NULL_RTX);
4619 free_temp_slots ();
4620 pop_temp_slots ();
4622 return sibcall_failure;
4625 /* Nonzero if we do not know how to pass TYPE solely in registers.
4626 We cannot do so in the following cases:
4628 - if the type has variable size
4629 - if the type is marked as addressable (it is required to be constructed
4630 into the stack)
4631 - if the padding and mode of the type is such that a copy into a register
4632 would put it into the wrong part of the register.
4634 Which padding can't be supported depends on the byte endianness.
4636 A value in a register is implicitly padded at the most significant end.
4637 On a big-endian machine, that is the lower end in memory.
4638 So a value padded in memory at the upper end can't go in a register.
4639 For a little-endian machine, the reverse is true. */
4641 bool
4642 default_must_pass_in_stack (enum machine_mode mode, tree type)
4644 if (!type)
4645 return false;
4647 /* If the type has variable size... */
4648 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4649 return true;
4651 /* If the type is marked as addressable (it is required
4652 to be constructed into the stack)... */
4653 if (TREE_ADDRESSABLE (type))
4654 return true;
4656 /* If the padding and mode of the type is such that a copy into
4657 a register would put it into the wrong part of the register. */
4658 if (mode == BLKmode
4659 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4660 && (FUNCTION_ARG_PADDING (mode, type)
4661 == (BYTES_BIG_ENDIAN ? upward : downward)))
4662 return true;
4664 return false;
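/* Example: on a big-endian target with 32-bit words, a 3-byte BLKmode
   struct that is padded upward in memory cannot go in a register,
   because the register would pad it at the opposite (most significant,
   low-address) end; the test above therefore sends it to the stack.  */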