gcc/calls.c (official-gcc.git)
/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
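
/* Illustrative sketch, not from the original file: a compile-time sanity
   check one could write for the macro above, using the negative-array-size
   trick.  It assumes nothing beyond the two target macros already used
   here; e.g. a 128-bit boundary with 8-bit units gives STACK_BYTES == 16.  */
#if 0
extern char stack_bytes_check[(STACK_BYTES * BITS_PER_UNIT
                               == PREFERRED_STACK_BOUNDARY) ? 1 : -1];
#endif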
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;
/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
   stack location's tail call argument has been already stored into the stack.
   This bitmap is used to prevent sibling call optimization if function tries
   to use parent's incoming argument slots when they have been already
   overwritten with tail call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;
static int calls_function (tree, int);
static int calls_function_1 (tree, int);

static void emit_call_1 (rtx, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static rtx try_to_integrate (tree, tree, rtx, int, tree, rtx);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      int);
static tree fix_unsafe_tree (tree);
static bool shift_returned_value (tree, rtx *);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.
   Actually, we only need to return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;
static int
calls_function (tree exp, int which)
{
  int val;

  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}
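
/* Illustrative sketch, not from the original file: how a caller might use
   calls_function.  WHICH == 0 asks whether evaluating the expression could
   call any function (and so might push arguments on the stack); WHICH == 1
   asks only about alloca-like calls.  EXAMPLE_ARG is a hypothetical tree.  */
#if 0
static void
example_calls_function_use (tree example_arg)
{
  if (calls_function (example_arg, 0))
    /* Evaluating EXAMPLE_ARG may call some function, so assume it may
       touch the outgoing argument area.  */
    ;
  if (calls_function (example_arg, 1))
    /* Evaluating EXAMPLE_ARG may call alloca and move the stack pointer.  */
    ;
}
#endif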
/* Recursive function to do the work of above function.  */

static int
calls_function_1 (tree exp, int which)
{
  int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
        return 1;
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
                == FUNCTION_TYPE)
               && (TYPE_RETURNS_STACK_DEPRESSED
                   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                   == FUNCTION_DECL)
               && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                       0)
                   & ECF_MAY_BE_ALLOCA))
        return 1;

      break;

    case CONSTRUCTOR:
      {
        tree tem;

        for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
          if (calls_function_1 (TREE_VALUE (tem), which))
            return 1;
      }

      return 0;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return 0;
      if (value_member (exp, calls_function_save_exprs))
        return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
                                             calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
              && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
        tree local;
        tree subblock;

        for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
          if (DECL_INITIAL (local) != 0
              && calls_function_1 (DECL_INITIAL (local), which))
            return 1;

        for (subblock = BLOCK_SUBBLOCKS (exp);
             subblock;
             subblock = TREE_CHAIN (subblock))
          if (calls_function_1 (subblock, which))
            return 1;
      }
      return 0;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
        if (calls_function_1 (TREE_VALUE (exp), which))
          return 1;
      return 0;

    default:
      break;
    }

  /* Only expressions and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'b')
    return 0;

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
        && calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
                      int reg_parm_seen, int sibcallp)
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
        if (fndecl != current_function_decl)
#endif
          funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fndecl ATTRIBUTE_UNUSED, tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (*args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
        (VOIDmode,
         gen_rtx_USE (VOIDmode,
                      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
         call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    note_eh_region_may_contain_throw ();

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN | ECF_LONGJMP))
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set LONGJMP if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */
static int
special_function_p (tree fndecl, int flags)
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }

      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_LONGJMP;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
                && ! strcmp (tname, "fork"))
               /* Linux specific: __clone.  Check NAME to insist on the
                  leading underscores, to avoid polluting the ISO / POSIX
                  namespace.  */
               || (name[0] == '_' && name[1] == '_'
                   && ! strcmp (tname, "clone"))
               || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
                   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
                   && (tname[5] == '\0'
                       || ((tname[5] == 'p' || tname[5] == 'e')
                           && tname[6] == '\0'))))
        flags |= ECF_FORK_OR_EXEC;
    }
  return flags;
}
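
/* Illustrative sketch, not from the original file: the returned ECF_*
   bits are meant to be OR-ed into an existing flag word and then tested
   individually, roughly like this.  */
#if 0
static void
example_special_function_use (tree fndecl, int flags)
{
  flags = special_function_p (fndecl, flags);
  if (flags & ECF_RETURNS_TWICE)
    /* Treat like setjmp: the call may return more than once.  */
    ;
  if (flags & ECF_MAY_BE_ALLOCA)
    /* The callee may allocate stack space visible to the caller.  */
    ;
}
#endif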
/* Return nonzero when FNDECL represents a function in the setjmp family,
   i.e. one that may return more than once.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
/* Return true when EXP contains an alloca call.  */
bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                              0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
      type = TREE_TYPE (exp);

      if (i)
        {
          if (i->pure_function)
            flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
          if (i->const_function)
            flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
        }

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
        flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_LIBCALL_BLOCK;
    }

  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (TREE_OPERAND (t, 0));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}
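
/* Illustrative sketch, not from the original file: call_expr_flags works
   whether or not the callee is known.  For a direct call the flags come
   from the FUNCTION_DECL; for an indirect call they fall back to the
   pointed-to FUNCTION_TYPE.  EXAMPLE_CALL_IS_PURE is hypothetical.  */
#if 0
static int
example_call_is_pure (tree call_expr)
{
  return (call_expr_flags (call_expr) & (ECF_CONST | ECF_PURE)) != 0;
}
#endif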
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */
static void
precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();

            /* ANSI doesn't require a sequence point here,
               but PCC has one, so this will avoid some problems.  */
            emit_queue ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        if ((! (GET_CODE (args[i].value) == REG
                || (GET_CODE (args[i].value) == SUBREG
                    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
            && args[i].mode != BLKmode
            && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
            && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                || preserve_subexpressions_p ()))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}
static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
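
/* Illustrative sketch, not from the original file: the two routines above
   are used as a bracket around emitting a nested call that may clobber
   the fixed register-parameter area (they only exist when
   REG_PARM_STACK_SPACE is defined).  */
#if 0
static void
example_fixed_area_bracket (int reg_parm_stack_space, rtx argblock)
{
  int low, high;
  rtx save = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                       &low, &high);
  /* ... emit the inner call that may clobber the fixed area ...  */
  if (save != NULL_RTX)
    restore_fixed_argument_area (save, argblock, high, low);
}
#endif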
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */
static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int nregs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
        int endian_correction = 0;

        args[i].n_aligned_regs = args[i].partial ? args[i].partial : nregs;
        args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode, BITS_PER_WORD);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register later passes know the first AND to zero out the
               bitfield being set in the register is unnecessary.  The store
               of 0 will be deleted as will at least the first AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word, BITS_PER_WORD);
          }
      }
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */
static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree actparms, tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;

      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many words are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
          || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
          || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
                                             type, argpos < n_named_args)
#endif
          )
        {
          /* If we're compiling a thunk, pass through invisible
             references instead of making a copy.  */
          if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
              || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
                                              type, argpos < n_named_args)
                  /* If it's in a register, we must make a copy of it too.  */
                  /* ??? Is this a sufficient test?  Is there a better one? */
                  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
                       && REG_P (DECL_RTL (args[i].tree_value)))
                  && ! TREE_ADDRESSABLE (type))
#endif
              )
            {
              /* C++ uses a TARGET_EXPR to indicate that we want to make a
                 new object from the argument.  If we are passing by
                 invisible reference, the callee will do that for us, so we
                 can strip off the TARGET_EXPR.  This is not always safe,
                 but it is safe in the only case where this is a useful
                 optimization; namely, when the argument is a plain object.
                 In that case, the frontend is just asking the backend to
                 make a bitwise copy of the argument.  */

              if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
                  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
                args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
            {
              /* In the V3 C++ ABI, parameters are destroyed in the caller.
                 We implement this by passing the address of the temporary
                 rather than expanding it into another allocated slot.  */
              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           args[i].tree_value);
              type = build_pointer_type (type);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0);
              *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

              args[i].tree_value = build1 (ADDR_EXPR,
                                           build_pointer_type (type),
                                           make_tree (type, copy));
              type = build_pointer_type (type);
            }
        }

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
        args[i].partial
          = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
                                        argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          if (stack_pointer_delta & (preferred_stack_boundary - 1))
            abort ();
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          args_size->var
            = size_binop (MINUS_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));
#endif
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
        args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
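
/* Illustrative worked instance, not from the original file, of the
   constant-size rounding above.  With args_size->constant == 20,
   stack_pointer_delta == 4 and a preferred boundary of 16 bytes:

       ((20 + 4 + 15) / 16) * 16 - 4  ==  32 - 4  ==  28

   so 28 bytes of new space leave the stack pointer 16-byte aligned
   once the pending 4-byte adjustment is taken into account.  */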
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */

  for (i = 0; i < num_actuals; i++)
    if ((flags & ECF_LIBCALL_BLOCK)
        || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
        enum machine_mode mode;

        /* If this is an addressable type, we cannot pre-evaluate it.  */
        if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
          abort ();

        args[i].value
          = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

        /* ANSI doesn't require a sequence point here,
           but PCC has one, so this will avoid some problems.  */
        emit_queue ();

        args[i].initial_value = args[i].value
          = protect_from_queue (args[i].value, 0);

        mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
        if (mode != args[i].mode)
          {
            args[i].value
              = convert_modes (args[i].mode, mode,
                               args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
            /* CSE will replace this only if it contains args[i].value
               pseudo, so convert it down to the declared mode using
               a SUBREG.  */
            if (GET_CODE (args[i].value) == REG
                && GET_MODE_CLASS (args[i].mode) == MODE_INT)
              {
                args[i].initial_value
                  = gen_lowpart_SUBREG (mode, args[i].value);
                SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
                SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                              args[i].unsignedp);
              }
#endif
          }
      }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack && args[i].reg != 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack = gen_rtx_MEM (args[i].mode, addr);
          set_mem_align (args[i].stack, PARM_BOUNDARY);
          set_mem_attributes (args[i].stack,
                              TREE_TYPE (args[i].tree_value), 1);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
          set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
          set_mem_attributes (args[i].stack_slot,
                              TREE_TYPE (args[i].tree_value), 1);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
      emit_queue ();
    }
  return funexp;
}
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */
static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();
          /* Set to non-negative if must move a word at a time, even if just
             one word (e.g, partial == 1 && mode == DFmode).  Set to -1 if
             we just use a normal move insn.  This value can be zero if the
             argument is a zero size structure with no fields.  */
          nregs = -1;
          if (partial)
            nregs = partial;
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            {
              tree type = TREE_TYPE (args[i].tree_value);
              emit_group_load (reg, args[i].value, type,
                               int_size_in_bytes (type));
            }

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  eg. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_binop (word_mode, ashl_optab, reg,
                                    GEN_INT (shift), reg, 1, OPTAB_WIDEN);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

#ifdef BLOCK_REG_PADDING
              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
                  && args[i].locate.where_pad == downward)
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  optab dir = BYTES_BIG_ENDIAN ? lshr_optab : ashl_optab;

                  emit_move_insn (x, tem);
                  x = expand_binop (word_mode, dir, x, GEN_INT (shift),
                                    ri, 1, OPTAB_WIDEN);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
#endif
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else
            use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
        }
    }
}
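
/* Illustrative worked instance, not from the original file, of the padding
   shifts above: a 1-byte argument on a target with 4-byte words and 8-bit
   units gives shift = (4 - 1) * 8 = 24, moving the value into the most
   significant byte of the word-sized register on a big-endian machine.  */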
1727 /* Try to integrate function. See expand_inline_function for documentation
1728 about the parameters. */
1730 static rtx
1731 try_to_integrate (tree fndecl, tree actparms, rtx target, int ignore,
1732 tree type, rtx structure_value_addr)
1733 {
1734 rtx temp;
1735 rtx before_call;
1736 int i;
1737 rtx old_stack_level = 0;
1738 int reg_parm_stack_space = 0;
1740 #ifdef REG_PARM_STACK_SPACE
1741 #ifdef MAYBE_REG_PARM_STACK_SPACE
1742 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1743 #else
1744 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1745 #endif
1746 #endif
1748 before_call = get_last_insn ();
1750 timevar_push (TV_INTEGRATION);
1752 temp = expand_inline_function (fndecl, actparms, target,
1753 ignore, type,
1754 structure_value_addr);
1756 timevar_pop (TV_INTEGRATION);
1758 /* If inlining succeeded, return. */
1759 if (temp != (rtx) (size_t) - 1)
1760 {
1761 if (ACCUMULATE_OUTGOING_ARGS)
1762 {
1763 /* If the outgoing argument list must be preserved, push
1764 the stack before executing the inlined function if it
1765 makes any calls. */
1767 i = reg_parm_stack_space;
1768 if (i > highest_outgoing_arg_in_use)
1769 i = highest_outgoing_arg_in_use;
1770 while (--i >= 0 && stack_usage_map[i] == 0)
1771 ;
1773 if (stack_arg_under_construction || i >= 0)
1774 {
1775 rtx first_insn
1776 = before_call ? NEXT_INSN (before_call) : get_insns ();
1777 rtx insn = NULL_RTX, seq;
1779 /* Look for a call in the inline function code.
1780 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1781 nonzero then there is a call and it is not necessary
1782 to scan the insns. */
1784 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1785 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1786 if (GET_CODE (insn) == CALL_INSN)
1787 break;
1789 if (insn)
1790 {
1791 /* Reserve enough stack space so that the largest
1792 argument list of any function call in the inline
1793 function does not overlap the argument list being
1794 evaluated. This is usually an overestimate because
1795 allocate_dynamic_stack_space reserves space for an
1796 outgoing argument list in addition to the requested
1797 space, but there is no way to ask for stack space such
1798 that an argument list of a certain length can be
1799 safely constructed.
1801 Add the stack space reserved for register arguments, if
1802 any, in the inline function. What is really needed is the
1803 largest value of reg_parm_stack_space in the inline
1804 function, but that is not available. Using the current
1805 value of reg_parm_stack_space is wrong, but gives
1806 correct results on all supported machines. */
1808 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1809 + reg_parm_stack_space);
1811 start_sequence ();
1812 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1813 allocate_dynamic_stack_space (GEN_INT (adjust),
1814 NULL_RTX, BITS_PER_UNIT);
1815 seq = get_insns ();
1816 end_sequence ();
1817 emit_insn_before (seq, first_insn);
1818 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1819 }
1820 }
1821 }
1823 /* If the result is equivalent to TARGET, return TARGET to simplify
1824 checks in store_expr. They can be equivalent but not equal in the
1825 case of a function that returns BLKmode. */
1826 if (temp != target && rtx_equal_p (temp, target))
1827 return target;
1828 return temp;
1829 }
1831 /* If inlining failed, mark FNDECL as needing to be compiled
1832 separately after all. If function was declared inline,
1833 give a warning. */
1834 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1835 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1836 {
1837 warning ("%Jinlining failed in call to '%F'", fndecl, fndecl);
1838 warning ("called from here");
1839 }
1840 (*lang_hooks.mark_addressable) (fndecl);
1841 return (rtx) (size_t) - 1;
1842 }
1844 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1845 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1846 bytes, then we would need to push some additional bytes to pad the
1847 arguments. So, we compute an adjustment to the stack pointer for an
1848 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1849 bytes. Then, when the arguments are pushed the stack will be perfectly
1850 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1851 be popped after the call. Returns the adjustment. */
1853 static int
1854 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1855 struct args_size *args_size,
1856 int preferred_unit_stack_boundary)
1857 {
1858 /* The number of bytes to pop so that the stack will be
1859 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1860 HOST_WIDE_INT adjustment;
1861 /* The alignment of the stack after the arguments are pushed, if we
1862 just pushed the arguments without adjusting the stack here. */
1863 HOST_WIDE_INT unadjusted_alignment;
1865 unadjusted_alignment
1866 = ((stack_pointer_delta + unadjusted_args_size)
1867 % preferred_unit_stack_boundary);
1869 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1870 as possible -- leaving just enough to cancel out the
1871 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1872 PENDING_STACK_ADJUST is non-negative, and congruent to
1873 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1875 /* Begin by trying to pop all the bytes. */
1876 unadjusted_alignment
1877 = (unadjusted_alignment
1878 - (pending_stack_adjust % preferred_unit_stack_boundary));
1879 adjustment = pending_stack_adjust;
1880 /* Push enough additional bytes that the stack will be aligned
1881 after the arguments are pushed. */
1882 if (preferred_unit_stack_boundary > 1)
1883 {
1884 if (unadjusted_alignment > 0)
1885 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1886 else
1887 adjustment += unadjusted_alignment;
1888 }
1890 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1891 bytes after the call. The right number is the entire
1892 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1893 by the arguments in the first place. */
1894 args_size->constant
1895 = pending_stack_adjust - adjustment + unadjusted_args_size;
1897 return adjustment;
1898 }
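/* Worked example with illustrative figures (not from the original
   source): take preferred_unit_stack_boundary == 16,
   stack_pointer_delta == 4, unadjusted_args_size == 20 and
   pending_stack_adjust == 28.  UNADJUSTED_ALIGNMENT starts as
   (4 + 20) % 16 == 8; popping all 28 pending bytes would leave
   8 - (28 % 16) == -4, so ADJUSTMENT becomes 28 + (-4) == 24.  After
   popping 24 bytes and pushing the 20 bytes of arguments the net stack
   offset is 4 - 24 + 20 == 0, i.e. 16-byte aligned, and
   ARGS_SIZE->CONSTANT is 28 - 24 + 20 == 24 bytes to pop after the
   call.  */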
1900 /* Scan expression X to see whether it dereferences any argument slots
1901 we have already clobbered with tail call arguments (as noted in the
1902 stored_args_map bitmap).
1903 Return nonzero if X dereferences such an argument slot,
1904 zero otherwise. */
1906 static int
1907 check_sibcall_argument_overlap_1 (rtx x)
1908 {
1909 RTX_CODE code;
1910 int i, j;
1911 unsigned int k;
1912 const char *fmt;
1914 if (x == NULL_RTX)
1915 return 0;
1917 code = GET_CODE (x);
1919 if (code == MEM)
1920 {
1921 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1922 i = 0;
1923 else if (GET_CODE (XEXP (x, 0)) == PLUS
1924 && XEXP (XEXP (x, 0), 0) ==
1925 current_function_internal_arg_pointer
1926 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1927 i = INTVAL (XEXP (XEXP (x, 0), 1));
1928 else
1929 return 0;
1931 #ifdef ARGS_GROW_DOWNWARD
1932 i = -i - GET_MODE_SIZE (GET_MODE (x));
1933 #endif
1935 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1936 if (i + k < stored_args_map->n_bits
1937 && TEST_BIT (stored_args_map, i + k))
1938 return 1;
1940 return 0;
1941 }
1943 /* Scan all subexpressions. */
1944 fmt = GET_RTX_FORMAT (code);
1945 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1946 {
1947 if (*fmt == 'e')
1948 {
1949 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1950 return 1;
1951 }
1952 else if (*fmt == 'E')
1953 {
1954 for (j = 0; j < XVECLEN (x, i); j++)
1955 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1956 return 1;
1957 }
1958 }
1959 return 0;
1960 }
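/* Illustrative example (hypothetical rtl, not from the original
   source): when args grow upward, a dereference such as
   (mem:SI (plus (internal_arg_pointer) (const_int 8))) gives i == 8,
   so the loop tests bits 8..11 of stored_args_map; if any of those
   argument bytes has already been overwritten with a tail call
   argument, the function reports an overlap.  */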
1962 /* Scan the sequence after INSN to see whether it dereferences any
1963 argument slots already clobbered by tail call arguments (as noted in
1964 the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is nonzero, add
1965 the stack slots for ARG to the stored_args_map bitmap afterwards (when
1966 ARG is a register, MARK_STORED_ARGS_MAP should be 0). Return nonzero
1967 if the sequence after INSN dereferences such argument slots, zero otherwise. */
1969 static int
1970 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1971 {
1972 int low, high;
1974 if (insn == NULL_RTX)
1975 insn = get_insns ();
1976 else
1977 insn = NEXT_INSN (insn);
1979 for (; insn; insn = NEXT_INSN (insn))
1980 if (INSN_P (insn)
1981 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1982 break;
1984 if (mark_stored_args_map)
1985 {
1986 #ifdef ARGS_GROW_DOWNWARD
1987 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1988 #else
1989 low = arg->locate.slot_offset.constant;
1990 #endif
1992 for (high = low + arg->locate.size.constant; low < high; low++)
1993 SET_BIT (stored_args_map, low);
1994 }
1995 return insn != NULL_RTX;
1996 }
1998 static tree
1999 fix_unsafe_tree (tree t)
2000 {
2001 switch (unsafe_for_reeval (t))
2002 {
2003 case 0: /* Safe. */
2004 break;
2006 case 1: /* Mildly unsafe. */
2007 t = unsave_expr (t);
2008 break;
2010 case 2: /* Wildly unsafe. */
2011 {
2012 tree var = build_decl (VAR_DECL, NULL_TREE,
2013 TREE_TYPE (t));
2014 SET_DECL_RTL (var,
2015 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
2016 t = var;
2017 }
2018 break;
2020 default:
2021 abort ();
2022 }
2023 return t;
2024 }
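/* Illustrative example (hypothetical argument, not from the original
   source): an argument like g (x) contains a call and is therefore
   "wildly unsafe" to re-evaluate; it is expanded once into a temporary
   VAR_DECL here so that both the sibcall and the normal call sequences
   built later reuse the same rtl instead of calling g twice.  */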
2027 /* If function value *VALUE was returned at the most significant end of a
2028 register, shift it towards the least significant end and convert it to
2029 TYPE's mode. Return true and update *VALUE if some action was needed.
2031 TYPE is the type of the function's return value, which is known not
2032 to have mode BLKmode. */
2034 static bool
2035 shift_returned_value (tree type, rtx *value)
2036 {
2037 if (targetm.calls.return_in_msb (type))
2038 {
2039 HOST_WIDE_INT shift;
2041 shift = (GET_MODE_BITSIZE (GET_MODE (*value))
2042 - BITS_PER_UNIT * int_size_in_bytes (type));
2043 if (shift > 0)
2044 {
2045 *value = expand_binop (GET_MODE (*value), lshr_optab, *value,
2046 GEN_INT (shift), 0, 1, OPTAB_WIDEN);
2047 *value = convert_to_mode (TYPE_MODE (type), *value, 0);
2048 return true;
2049 }
2050 }
2051 return false;
2052 }
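/* Worked example (illustrative, not from the original source): a
   2-byte value returned in the most significant end of a 64-bit
   register gives shift == 64 - 8 * 2 == 48; the logical right shift
   moves it to the least significant end, after which it is converted
   to TYPE's own mode.  */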
2054 /* Generate all the code for a function call
2055 and return an rtx for its value.
2056 Store the value in TARGET (specified as an rtx) if convenient.
2057 If the value is stored in TARGET then TARGET is returned.
2058 If IGNORE is nonzero, then we ignore the value of the function call. */
2060 rtx
2061 expand_call (tree exp, rtx target, int ignore)
2062 {
2063 /* Nonzero if we are currently expanding a call. */
2064 static int currently_expanding_call = 0;
2066 /* List of actual parameters. */
2067 tree actparms = TREE_OPERAND (exp, 1);
2068 /* RTX for the function to be called. */
2069 rtx funexp;
2070 /* Sequence of insns to perform a tail recursive "call". */
2071 rtx tail_recursion_insns = NULL_RTX;
2072 /* Sequence of insns to perform a normal "call". */
2073 rtx normal_call_insns = NULL_RTX;
2074 /* Sequence of insns to perform a sibling (tail) "call". */
2075 rtx tail_call_insns = NULL_RTX;
2076 /* Data type of the function. */
2077 tree funtype;
2078 tree type_arg_types;
2079 /* Declaration of the function being called,
2080 or 0 if the function is computed (not known by name). */
2081 tree fndecl = 0;
2082 rtx insn;
2083 int try_tail_call = 1;
2084 int try_tail_recursion = 1;
2085 int pass;
2087 /* Register in which non-BLKmode value will be returned,
2088 or 0 if no value or if value is BLKmode. */
2089 rtx valreg;
2090 /* Address where we should return a BLKmode value;
2091 0 if value not BLKmode. */
2092 rtx structure_value_addr = 0;
2093 /* Nonzero if that address is being passed by treating it as
2094 an extra, implicit first parameter. Otherwise,
2095 it is passed by being copied directly into struct_value_rtx. */
2096 int structure_value_addr_parm = 0;
2097 /* Size of aggregate value wanted, or zero if none wanted
2098 or if we are using the non-reentrant PCC calling convention
2099 or expecting the value in registers. */
2100 HOST_WIDE_INT struct_value_size = 0;
2101 /* Nonzero if called function returns an aggregate in memory PCC style,
2102 by returning the address of where to find it. */
2103 int pcc_struct_value = 0;
2104 rtx struct_value = 0;
2106 /* Number of actual parameters in this call, including struct value addr. */
2107 int num_actuals;
2108 /* Number of named args. Args after this are anonymous ones
2109 and they must all go on the stack. */
2110 int n_named_args;
2112 /* Vector of information about each argument.
2113 Arguments are numbered in the order they will be pushed,
2114 not the order they are written. */
2115 struct arg_data *args;
2117 /* Total size in bytes of all the stack-parms scanned so far. */
2118 struct args_size args_size;
2119 struct args_size adjusted_args_size;
2120 /* Size of arguments before any adjustments (such as rounding). */
2121 int unadjusted_args_size;
2122 /* Data on reg parms scanned so far. */
2123 CUMULATIVE_ARGS args_so_far;
2124 /* Nonzero if a reg parm has been scanned. */
2125 int reg_parm_seen;
2126 /* Nonzero if this is an indirect function call. */
2128 /* Nonzero if we must avoid push-insns in the args for this call.
2129 If stack space is allocated for register parameters, but not by the
2130 caller, then it is preallocated in the fixed part of the stack frame.
2131 So the entire argument block must then be preallocated (i.e., we
2132 ignore PUSH_ROUNDING in that case). */
2134 int must_preallocate = !PUSH_ARGS;
2136 /* Size of the stack reserved for parameter registers. */
2137 int reg_parm_stack_space = 0;
2139 /* Address of space preallocated for stack parms
2140 (on machines that lack push insns), or 0 if space not preallocated. */
2141 rtx argblock = 0;
2143 /* Mask of ECF_ flags. */
2144 int flags = 0;
2145 /* Nonzero if this is a call to an inline function. */
2146 int is_integrable = 0;
2147 #ifdef REG_PARM_STACK_SPACE
2148 /* Define the boundary of the register parm stack space that needs to be
2149 saved, if any. */
2150 int low_to_save, high_to_save;
2151 rtx save_area = 0; /* Place that it is saved */
2152 #endif
2154 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2155 rtx temp_target = 0;
2156 char *initial_stack_usage_map = stack_usage_map;
2158 int old_stack_allocated;
2160 /* State variables to track stack modifications. */
2161 rtx old_stack_level = 0;
2162 int old_stack_arg_under_construction = 0;
2163 int old_pending_adj = 0;
2164 int old_inhibit_defer_pop = inhibit_defer_pop;
2166 /* Some stack pointer alterations we make are performed via
2167 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2168 which we then also need to save/restore along the way. */
2169 int old_stack_pointer_delta = 0;
2171 rtx call_fusage;
2172 tree p = TREE_OPERAND (exp, 0);
2173 tree addr = TREE_OPERAND (exp, 0);
2174 int i;
2175 /* The alignment of the stack, in bits. */
2176 HOST_WIDE_INT preferred_stack_boundary;
2177 /* The alignment of the stack, in bytes. */
2178 HOST_WIDE_INT preferred_unit_stack_boundary;
2180 /* See if this is a "nothrow" function call. */
2181 if (TREE_NOTHROW (exp))
2182 flags |= ECF_NOTHROW;
2184 /* See if we can find a DECL-node for the actual function.
2185 As a result, decide whether this is a call to an integrable function. */
2187 fndecl = get_callee_fndecl (exp);
2188 if (fndecl)
2190 if (!flag_no_inline
2191 && fndecl != current_function_decl
2192 && DECL_INLINE (fndecl)
2193 && DECL_SAVED_INSNS (fndecl)
2194 && DECL_SAVED_INSNS (fndecl)->inlinable)
2195 is_integrable = 1;
2196 else if (! TREE_ADDRESSABLE (fndecl))
2198 /* In case this function later becomes inlinable,
2199 record that there was already a non-inline call to it.
2201 Use abstraction instead of setting TREE_ADDRESSABLE
2202 directly. */
2203 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2204 && optimize > 0)
2206 warning ("%Jcan't inline call to '%F'", fndecl, fndecl);
2207 warning ("called from here");
2209 (*lang_hooks.mark_addressable) (fndecl);
2212 if (ignore
2213 && lookup_attribute ("warn_unused_result",
2214 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
2215 warning ("ignoring return value of `%D', "
2216 "declared with attribute warn_unused_result", fndecl);
2218 flags |= flags_from_decl_or_type (fndecl);
2221 /* If we don't have a specific function to call, see if we have
2222 attributes set in the type. */
2223 else
2225 if (ignore
2226 && lookup_attribute ("warn_unused_result",
2227 TYPE_ATTRIBUTES (TREE_TYPE (TREE_TYPE (p)))))
2228 warning ("ignoring return value of function "
2229 "declared with attribute warn_unused_result");
2230 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2233 struct_value = targetm.calls.struct_value_rtx (fndecl ? TREE_TYPE (fndecl) : 0, 0);
2235 /* Warn if this value is an aggregate type,
2236 regardless of which calling convention we are using for it. */
2237 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2238 warning ("function call has aggregate value");
2240 /* If the result of a pure or const function call is ignored (or void),
2241 and none of its arguments are volatile, we can avoid expanding the
2242 call and just evaluate the arguments for side-effects. */
2243 if ((flags & (ECF_CONST | ECF_PURE))
2244 && (ignore || target == const0_rtx
2245 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2247 bool volatilep = false;
2248 tree arg;
2250 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2251 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2253 volatilep = true;
2254 break;
2257 if (! volatilep)
2259 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2260 expand_expr (TREE_VALUE (arg), const0_rtx,
2261 VOIDmode, EXPAND_NORMAL);
2262 return const0_rtx;
2266 #ifdef REG_PARM_STACK_SPACE
2267 #ifdef MAYBE_REG_PARM_STACK_SPACE
2268 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2269 #else
2270 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2271 #endif
2272 #endif
2274 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2275 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2276 must_preallocate = 1;
2277 #endif
2279 /* Set up a place to return a structure. */
2281 /* Cater to broken compilers. */
2282 if (aggregate_value_p (exp, fndecl))
2284 /* This call returns a big structure. */
2285 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2287 #ifdef PCC_STATIC_STRUCT_RETURN
2289 pcc_struct_value = 1;
2290 /* Easier than making that case work right. */
2291 if (is_integrable)
2293 /* In case this is a static function, note that it has been
2294 used. */
2295 if (! TREE_ADDRESSABLE (fndecl))
2296 (*lang_hooks.mark_addressable) (fndecl);
2297 is_integrable = 0;
2300 #else /* not PCC_STATIC_STRUCT_RETURN */
2302 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2304 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2306 /* The structure value address arg is already in actparms.
2307 Pull it out. It might be nice to just leave it there, but
2308 we need to set structure_value_addr. */
2309 tree return_arg = TREE_VALUE (actparms);
2310 actparms = TREE_CHAIN (actparms);
2311 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2312 VOIDmode, EXPAND_NORMAL);
2314 else if (target && GET_CODE (target) == MEM)
2315 structure_value_addr = XEXP (target, 0);
2316 else
2318 /* For variable-sized objects, we must be called with a target
2319 specified. If we were to allocate space on the stack here,
2320 we would have no way of knowing when to free it. */
2321 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2323 mark_temp_addr_taken (d);
2324 structure_value_addr = XEXP (d, 0);
2325 target = 0;
2328 #endif /* not PCC_STATIC_STRUCT_RETURN */
2331 /* If called function is inline, try to integrate it. */
2333 if (is_integrable)
2335 rtx temp = try_to_integrate (fndecl, actparms, target,
2336 ignore, TREE_TYPE (exp),
2337 structure_value_addr);
2338 if (temp != (rtx) (size_t) - 1)
2339 return temp;
2342 /* Figure out the amount to which the stack should be aligned. */
2343 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2344 if (fndecl)
2346 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2347 if (i && i->preferred_incoming_stack_boundary)
2348 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2351 /* Operand 0 is a pointer-to-function; get the type of the function. */
2352 funtype = TREE_TYPE (addr);
2353 if (! POINTER_TYPE_P (funtype))
2354 abort ();
2355 funtype = TREE_TYPE (funtype);
2357 /* Munge the tree to split complex arguments into their imaginary
2358 and real parts. */
2359 if (SPLIT_COMPLEX_ARGS)
2361 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2362 actparms = split_complex_values (actparms);
2364 else
2365 type_arg_types = TYPE_ARG_TYPES (funtype);
2367 /* See if this is a call to a function that can return more than once
2368 or a call to longjmp or malloc. */
2369 flags |= special_function_p (fndecl, flags);
2371 if (flags & ECF_MAY_BE_ALLOCA)
2372 current_function_calls_alloca = 1;
2374 /* If struct_value_rtx is 0, it means pass the address
2375 as if it were an extra parameter. */
2376 if (structure_value_addr && struct_value == 0)
2378 /* If structure_value_addr is a REG other than
2379 virtual_outgoing_args_rtx, we can always use it. If it
2380 is not a REG, we must always copy it into a register.
2381 If it is virtual_outgoing_args_rtx, we must copy it to another
2382 register in some cases. */
2383 rtx temp = (GET_CODE (structure_value_addr) != REG
2384 || (ACCUMULATE_OUTGOING_ARGS
2385 && stack_arg_under_construction
2386 && structure_value_addr == virtual_outgoing_args_rtx)
2387 ? copy_addr_to_reg (structure_value_addr)
2388 : structure_value_addr);
2390 actparms
2391 = tree_cons (error_mark_node,
2392 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2393 temp),
2394 actparms);
2395 structure_value_addr_parm = 1;
2398 /* Count the arguments and set NUM_ACTUALS. */
2399 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2400 num_actuals++;
2402 /* Start updating where the next arg would go.
2404 On some machines (such as the PA) indirect calls have a different
2405 calling convention than normal calls. The last argument in
2406 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2407 or not. */
2408 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
2410 /* Compute number of named args.
2411 Normally, don't include the last named arg if anonymous args follow.
2412 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2413 (If no anonymous args follow, the result of list_length is actually
2414 one too large. This is harmless.)
2416 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2417 zero, this machine will be able to place unnamed args that were
2418 passed in registers into the stack. So treat all args as named.
2419 This allows the insns emitted for a specific argument list to be
2420 independent of the function declaration.
2422 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2423 reliable way to pass unnamed args in registers, so we must force
2424 them into memory. */
2426 if ((targetm.calls.strict_argument_naming (&args_so_far)
2427 || ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2428 && type_arg_types != 0)
2429 n_named_args
2430 = (list_length (type_arg_types)
2431 /* Don't include the last named arg. */
2432 - (targetm.calls.strict_argument_naming (&args_so_far) ? 0 : 1)
2433 /* Count the struct value address, if it is passed as a parm. */
2434 + structure_value_addr_parm);
2435 else
2436 /* If we know nothing, treat all args as named. */
2437 n_named_args = num_actuals;
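/* Illustrative example (hypothetical prototype, not from the original
   source): for int f (int a, int b, ...), TYPE_ARG_TYPES lists two
   entries, so without strict argument naming n_named_args == 2 - 1 == 1
   and only A is named, while with strict naming both A and B are.  For
   a non-varargs prototype the trailing void entry makes list_length one
   too large, so subtracting one is harmless, as noted above.  */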
2439 /* Make a vector to hold all the information about each arg. */
2440 args = alloca (num_actuals * sizeof (struct arg_data));
2441 memset (args, 0, num_actuals * sizeof (struct arg_data));
2443 /* Build up entries in the ARGS array, compute the size of the
2444 arguments into ARGS_SIZE, etc. */
2445 initialize_argument_information (num_actuals, args, &args_size,
2446 n_named_args, actparms, fndecl,
2447 &args_so_far, reg_parm_stack_space,
2448 &old_stack_level, &old_pending_adj,
2449 &must_preallocate, &flags);
2451 if (args_size.var)
2453 /* If this function requires a variable-sized argument list, don't
2454 try to make a cse'able block for this call. We may be able to
2455 do this eventually, but it is too complicated to keep track of
2456 what insns go in the cse'able block and which don't. */
2458 flags &= ~ECF_LIBCALL_BLOCK;
2459 must_preallocate = 1;
2462 /* Now make final decision about preallocating stack space. */
2463 must_preallocate = finalize_must_preallocate (must_preallocate,
2464 num_actuals, args,
2465 &args_size);
2467 /* If the structure value address will reference the stack pointer, we
2468 must stabilize it. We don't need to do this if we know that we are
2469 not going to adjust the stack pointer in processing this call. */
2471 if (structure_value_addr
2472 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2473 || reg_mentioned_p (virtual_outgoing_args_rtx,
2474 structure_value_addr))
2475 && (args_size.var
2476 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2477 structure_value_addr = copy_to_reg (structure_value_addr);
2479 /* Tail calls can make things harder to debug, and we've traditionally
2480 pushed these optimizations into -O2. Don't try if we're already
2481 expanding a call, as that means we're an argument. Don't try if
2482 there are cleanups, as we know there's code to follow the call.
2484 If rtx_equal_function_value_matters is false, that means we've
2485 finished with regular parsing. Which means that some of the
2486 machinery we use to generate tail-calls is no longer in place.
2487 This is most often true of sjlj-exceptions, which we couldn't
2488 tail-call to anyway. */
2490 if (currently_expanding_call++ != 0
2491 || !flag_optimize_sibling_calls
2492 || !rtx_equal_function_value_matters
2493 || any_pending_cleanups ()
2494 || args_size.var)
2495 try_tail_call = try_tail_recursion = 0;
2497 /* Tail recursion fails when we are not dealing with recursive calls. */
2498 if (!try_tail_recursion
2499 || TREE_CODE (addr) != ADDR_EXPR
2500 || TREE_OPERAND (addr, 0) != current_function_decl)
2501 try_tail_recursion = 0;
2503 /* Other reasons for tail call optimization to fail. */
2504 if (
2505 #ifdef HAVE_sibcall_epilogue
2506 !HAVE_sibcall_epilogue
2507 #else
2508 1
2509 #endif
2510 || !try_tail_call
2511 /* Doing sibling call optimization needs some work, since
2512 structure_value_addr can be allocated on the stack.
2513 It does not seem worth the effort since few optimizable
2514 sibling calls will return a structure. */
2515 || structure_value_addr != NULL_RTX
2516 /* Check whether the target is able to optimize the call
2517 into a sibcall. */
2518 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2519 /* Functions that do not return exactly once may not be sibcall
2520 optimized. */
2521 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2522 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2523 /* If the called function is nested in the current one, it might access
2524 some of the caller's arguments, but could clobber them beforehand if
2525 the argument areas are shared. */
2526 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2527 /* If this function requires more stack slots than the current
2528 function, we cannot change it into a sibling call. */
2529 || args_size.constant > current_function_args_size
2530 /* If the callee pops its own arguments, then it must pop exactly
2531 the same number of arguments as the current function. */
2532 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2533 != RETURN_POPS_ARGS (current_function_decl,
2534 TREE_TYPE (current_function_decl),
2535 current_function_args_size))
2536 || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
2537 try_tail_call = 0;
2539 if (try_tail_call || try_tail_recursion)
2541 int end, inc;
2542 actparms = NULL_TREE;
2543 /* Ok, we're going to give the tail call the old college try.
2544 This means we're going to evaluate the function arguments
2545 up to three times. There are two degrees of badness we can
2546 encounter, those that can be unsaved and those that can't.
2547 (See unsafe_for_reeval commentary for details.)
2549 Generate a new argument list. Pass safe arguments through
2550 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2551 For hard badness, evaluate them now and put their resulting
2552 rtx in a temporary VAR_DECL.
2554 initialize_argument_information has ordered the array for the
2555 order to be pushed, and we must remember this when reconstructing
2556 the original argument order. */
2558 if (PUSH_ARGS_REVERSED)
2560 inc = 1;
2561 i = 0;
2562 end = num_actuals;
2564 else
2566 inc = -1;
2567 i = num_actuals - 1;
2568 end = -1;
2571 for (; i != end; i += inc)
2573 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2574 /* We need to build actparms for optimize_tail_recursion. We can
2575 safely trash away TREE_PURPOSE, since it is unused by this
2576 function. */
2577 if (try_tail_recursion)
2578 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2580 /* Do the same for the function address if it is an expression. */
2581 if (!fndecl)
2582 addr = fix_unsafe_tree (addr);
2583 /* Expanding one of those dangerous arguments could have added
2584 cleanups, but otherwise give it a whirl. */
2585 if (any_pending_cleanups ())
2586 try_tail_call = try_tail_recursion = 0;
2589 /* Generate a tail recursion sequence when calling ourselves. */
2591 if (try_tail_recursion)
2593 /* We want to emit any pending stack adjustments before the tail
2594 recursion "call". That way we know any adjustment after the tail
2595 recursion call can be ignored if we indeed use the tail recursion
2596 call expansion. */
2597 int save_pending_stack_adjust = pending_stack_adjust;
2598 int save_stack_pointer_delta = stack_pointer_delta;
2600 /* Emit any queued insns now; otherwise they would end up in
2601 only one of the alternates. */
2602 emit_queue ();
2604 /* Use a new sequence to hold any RTL we generate. We do not even
2605 know if we will use this RTL yet. The final decision can not be
2606 made until after RTL generation for the entire function is
2607 complete. */
2608 start_sequence ();
2609 /* If expanding any of the arguments creates cleanups, we can't
2610 do a tailcall. So, we'll need to pop the pending cleanups
2611 list. If, however, all goes well, and there are no cleanups
2612 then the call to expand_start_target_temps will have no
2613 effect. */
2614 expand_start_target_temps ();
2615 if (optimize_tail_recursion (actparms, get_last_insn ()))
2617 if (any_pending_cleanups ())
2618 try_tail_call = try_tail_recursion = 0;
2619 else
2620 tail_recursion_insns = get_insns ();
2622 expand_end_target_temps ();
2623 end_sequence ();
2625 /* Restore the original pending stack adjustment for the sibling and
2626 normal call cases below. */
2627 pending_stack_adjust = save_pending_stack_adjust;
2628 stack_pointer_delta = save_stack_pointer_delta;
2631 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2633 /* A fork duplicates the profile information, and an exec discards
2634 it. We can't rely on fork/exec to be paired. So write out the
2635 profile information we have gathered so far, and clear it. */
2636 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2637 is subject to race conditions, just as with multithreaded
2638 programs. */
2640 emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
2643 /* Ensure current function's preferred stack boundary is at least
2644 what we need. We don't have to increase alignment for recursive
2645 functions. */
2646 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2647 && fndecl != current_function_decl)
2648 cfun->preferred_stack_boundary = preferred_stack_boundary;
2649 if (fndecl == current_function_decl)
2650 cfun->recursive_call_emit = true;
2652 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2654 function_call_count++;
2656 /* We want to make two insn chains; one for a sibling call, the other
2657 for a normal call. We will select one of the two chains after
2658 initial RTL generation is complete. */
2659 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2661 int sibcall_failure = 0;
2662 /* We want to emit any pending stack adjustments before the tail
2663 recursion "call". That way we know any adjustment after the tail
2664 recursion call can be ignored if we indeed use the tail recursion
2665 call expansion. */
2666 int save_pending_stack_adjust = 0;
2667 int save_stack_pointer_delta = 0;
2668 rtx insns;
2669 rtx before_call, next_arg_reg;
2671 if (pass == 0)
2673 /* Emit any queued insns now; otherwise they would end up in
2674 only one of the alternates. */
2675 emit_queue ();
2677 /* State variables we need to save and restore between
2678 iterations. */
2679 save_pending_stack_adjust = pending_stack_adjust;
2680 save_stack_pointer_delta = stack_pointer_delta;
2682 if (pass)
2683 flags &= ~ECF_SIBCALL;
2684 else
2685 flags |= ECF_SIBCALL;
2687 /* Other state variables that we must reinitialize each time
2688 through the loop (that are not initialized by the loop itself). */
2689 argblock = 0;
2690 call_fusage = 0;
2692 /* Start a new sequence for the normal call case.
2694 From this point on, if the sibling call fails, we want to set
2695 sibcall_failure instead of continuing the loop. */
2696 start_sequence ();
2698 if (pass == 0)
2700 /* We know at this point that there are not currently any
2701 pending cleanups. If, however, in the process of evaluating
2702 the arguments we were to create some, we'll need to be
2703 able to get rid of them. */
2704 expand_start_target_temps ();
2707 /* Don't let pending stack adjusts add up to too much.
2708 Also, do all pending adjustments now if there is any chance
2709 this might be a call to alloca or if we are expanding a sibling
2710 call sequence or if we are calling a function that is to return
2711 with stack pointer depressed. */
2712 if (pending_stack_adjust >= 32
2713 || (pending_stack_adjust > 0
2714 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2715 || pass == 0)
2716 do_pending_stack_adjust ();
2718 /* When calling a const function, we must pop the stack args right away,
2719 so that the pop is deleted or moved with the call. */
2720 if (pass && (flags & ECF_LIBCALL_BLOCK))
2721 NO_DEFER_POP;
2723 #ifdef FINAL_REG_PARM_STACK_SPACE
2724 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2725 args_size.var);
2726 #endif
2727 /* Precompute any arguments as needed. */
2728 if (pass)
2729 precompute_arguments (flags, num_actuals, args);
2731 /* Now we are about to start emitting insns that can be deleted
2732 if a libcall is deleted. */
2733 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2734 start_sequence ();
2736 adjusted_args_size = args_size;
2737 /* Compute the actual size of the argument block required. The variable
2738 and constant sizes must be combined, the size may have to be rounded,
2739 and there may be a minimum required size. When generating a sibcall
2740 pattern, do not round up, since we'll be re-using whatever space our
2741 caller provided. */
2742 unadjusted_args_size
2743 = compute_argument_block_size (reg_parm_stack_space,
2744 &adjusted_args_size,
2745 (pass == 0 ? 0
2746 : preferred_stack_boundary));
2748 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2750 /* The argument block when performing a sibling call is the
2751 incoming argument block. */
2752 if (pass == 0)
2754 argblock = virtual_incoming_args_rtx;
2755 argblock
2756 #ifdef STACK_GROWS_DOWNWARD
2757 = plus_constant (argblock, current_function_pretend_args_size);
2758 #else
2759 = plus_constant (argblock, -current_function_pretend_args_size);
2760 #endif
2761 stored_args_map = sbitmap_alloc (args_size.constant);
2762 sbitmap_zero (stored_args_map);
2765 /* If we have no actual push instructions, or shouldn't use them,
2766 make space for all args right now. */
2767 else if (adjusted_args_size.var != 0)
2769 if (old_stack_level == 0)
2771 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2772 old_stack_pointer_delta = stack_pointer_delta;
2773 old_pending_adj = pending_stack_adjust;
2774 pending_stack_adjust = 0;
2775 /* stack_arg_under_construction says whether a stack arg is
2776 being constructed at the old stack level. Pushing the stack
2777 gets a clean outgoing argument block. */
2778 old_stack_arg_under_construction = stack_arg_under_construction;
2779 stack_arg_under_construction = 0;
2781 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2783 else
2785 /* Note that we must go through the motions of allocating an argument
2786 block even if the size is zero because we may be storing args
2787 in the area reserved for register arguments, which may be part of
2788 the stack frame. */
2790 int needed = adjusted_args_size.constant;
2792 /* Store the maximum argument space used. It will be pushed by
2793 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2794 checking). */
2796 if (needed > current_function_outgoing_args_size)
2797 current_function_outgoing_args_size = needed;
2799 if (must_preallocate)
2801 if (ACCUMULATE_OUTGOING_ARGS)
2803 /* Since the stack pointer will never be pushed, it is
2804 possible for the evaluation of a parm to clobber
2805 something we have already written to the stack.
2806 Since most function calls on RISC machines do not use
2807 the stack, this is uncommon, but must work correctly.
2809 Therefore, we save any area of the stack that was already
2810 written and that we are using. Here we set up to do this
2811 by making a new stack usage map from the old one. The
2812 actual save will be done by store_one_arg.
2814 Another approach might be to try to reorder the argument
2815 evaluations to avoid this conflicting stack usage. */
2817 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2818 /* Since we will be writing into the entire argument area,
2819 the map must be allocated for its entire size, not just
2820 the part that is the responsibility of the caller. */
2821 needed += reg_parm_stack_space;
2822 #endif
2824 #ifdef ARGS_GROW_DOWNWARD
2825 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2826 needed + 1);
2827 #else
2828 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2829 needed);
2830 #endif
2831 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2833 if (initial_highest_arg_in_use)
2834 memcpy (stack_usage_map, initial_stack_usage_map,
2835 initial_highest_arg_in_use);
2837 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2838 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2839 (highest_outgoing_arg_in_use
2840 - initial_highest_arg_in_use));
2841 needed = 0;
2843 /* The address of the outgoing argument list must not be
2844 copied to a register here, because argblock would be left
2845 pointing to the wrong place after the call to
2846 allocate_dynamic_stack_space below. */
2848 argblock = virtual_outgoing_args_rtx;
2850 else
2852 if (inhibit_defer_pop == 0)
2854 /* Try to reuse some or all of the pending_stack_adjust
2855 to get this space. */
2856 needed
2857 = (combine_pending_stack_adjustment_and_call
2858 (unadjusted_args_size,
2859 &adjusted_args_size,
2860 preferred_unit_stack_boundary));
2862 /* combine_pending_stack_adjustment_and_call computes
2863 an adjustment before the arguments are allocated.
2864 Account for them and see whether or not the stack
2865 needs to go up or down. */
2866 needed = unadjusted_args_size - needed;
2868 if (needed < 0)
2870 /* We're releasing stack space. */
2871 /* ??? We can avoid any adjustment at all if we're
2872 already aligned. FIXME. */
2873 pending_stack_adjust = -needed;
2874 do_pending_stack_adjust ();
2875 needed = 0;
2877 else
2878 /* We need to allocate space. We'll do that in
2879 push_block below. */
2880 pending_stack_adjust = 0;
2883 /* Special case this because overhead of `push_block' in
2884 this case is non-trivial. */
2885 if (needed == 0)
2886 argblock = virtual_outgoing_args_rtx;
2887 else
2889 argblock = push_block (GEN_INT (needed), 0, 0);
2890 #ifdef ARGS_GROW_DOWNWARD
2891 argblock = plus_constant (argblock, needed);
2892 #endif
2895 /* We only really need to call `copy_to_reg' in the case
2896 where push insns are going to be used to pass ARGBLOCK
2897 to a function call in ARGS. In that case, the stack
2898 pointer changes value from the allocation point to the
2899 call point, and hence the value of
2900 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2901 as well always do it. */
2902 argblock = copy_to_reg (argblock);
2907 if (ACCUMULATE_OUTGOING_ARGS)
2909 /* The save/restore code in store_one_arg handles all
2910 cases except one: a constructor call (including a C
2911 function returning a BLKmode struct) to initialize
2912 an argument. */
2913 if (stack_arg_under_construction)
2915 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2916 rtx push_size = GEN_INT (reg_parm_stack_space
2917 + adjusted_args_size.constant);
2918 #else
2919 rtx push_size = GEN_INT (adjusted_args_size.constant);
2920 #endif
2921 if (old_stack_level == 0)
2923 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2924 NULL_RTX);
2925 old_stack_pointer_delta = stack_pointer_delta;
2926 old_pending_adj = pending_stack_adjust;
2927 pending_stack_adjust = 0;
2928 /* stack_arg_under_construction says whether a stack
2929 arg is being constructed at the old stack level.
2930 Pushing the stack gets a clean outgoing argument
2931 block. */
2932 old_stack_arg_under_construction
2933 = stack_arg_under_construction;
2934 stack_arg_under_construction = 0;
2935 /* Make a new map for the new argument list. */
2936 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2937 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2938 highest_outgoing_arg_in_use = 0;
2940 allocate_dynamic_stack_space (push_size, NULL_RTX,
2941 BITS_PER_UNIT);
2944 /* If argument evaluation might modify the stack pointer,
2945 copy the address of the argument list to a register. */
2946 for (i = 0; i < num_actuals; i++)
2947 if (args[i].pass_on_stack)
2949 argblock = copy_addr_to_reg (argblock);
2950 break;
2954 compute_argument_addresses (args, argblock, num_actuals);
2956 /* If we push args individually in reverse order, perform stack alignment
2957 before the first push (the last arg). */
2958 if (PUSH_ARGS_REVERSED && argblock == 0
2959 && adjusted_args_size.constant != unadjusted_args_size)
2961 /* When the stack adjustment is pending, we get better code
2962 by combining the adjustments. */
2963 if (pending_stack_adjust
2964 && ! (flags & ECF_LIBCALL_BLOCK)
2965 && ! inhibit_defer_pop)
2967 pending_stack_adjust
2968 = (combine_pending_stack_adjustment_and_call
2969 (unadjusted_args_size,
2970 &adjusted_args_size,
2971 preferred_unit_stack_boundary));
2972 do_pending_stack_adjust ();
2974 else if (argblock == 0)
2975 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2976 - unadjusted_args_size));
2978 /* Now that the stack is properly aligned, pops can't safely
2979 be deferred during the evaluation of the arguments. */
2980 NO_DEFER_POP;
2982 funexp = rtx_for_function_call (fndecl, addr);
2984 /* Figure out the register where the value, if any, will come back. */
2985 valreg = 0;
2986 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2987 && ! structure_value_addr)
2989 if (pcc_struct_value)
2990 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2991 fndecl, (pass == 0));
2992 else
2993 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2996 /* Precompute all register parameters. It isn't safe to compute anything
2997 once we have started filling any specific hard regs. */
2998 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3000 #ifdef REG_PARM_STACK_SPACE
3001 /* Save the fixed argument area if it's part of the caller's frame and
3002 is clobbered by argument setup for this call. */
3003 if (ACCUMULATE_OUTGOING_ARGS && pass)
3004 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3005 &low_to_save, &high_to_save);
3006 #endif
3008 /* Now store (and compute if necessary) all non-register parms.
3009 These come before register parms, since they can require block-moves,
3010 which could clobber the registers used for register parms.
3011 Parms which have partial registers are not stored here,
3012 but we do preallocate space here if they want that. */
3014 for (i = 0; i < num_actuals; i++)
3015 if (args[i].reg == 0 || args[i].pass_on_stack)
3017 rtx before_arg = get_last_insn ();
3019 if (store_one_arg (&args[i], argblock, flags,
3020 adjusted_args_size.var != 0,
3021 reg_parm_stack_space)
3022 || (pass == 0
3023 && check_sibcall_argument_overlap (before_arg,
3024 &args[i], 1)))
3025 sibcall_failure = 1;
3028 /* If we have a parm that is passed in registers but not in memory
3029 and whose alignment does not permit a direct copy into registers,
3030 make a group of pseudos that correspond to each register that we
3031 will later fill. */
3032 if (STRICT_ALIGNMENT)
3033 store_unaligned_arguments_into_pseudos (args, num_actuals);
3035 /* Now store any partially-in-registers parm.
3036 This is the last place a block-move can happen. */
3037 if (reg_parm_seen)
3038 for (i = 0; i < num_actuals; i++)
3039 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3041 rtx before_arg = get_last_insn ();
3043 if (store_one_arg (&args[i], argblock, flags,
3044 adjusted_args_size.var != 0,
3045 reg_parm_stack_space)
3046 || (pass == 0
3047 && check_sibcall_argument_overlap (before_arg,
3048 &args[i], 1)))
3049 sibcall_failure = 1;
3052 /* If we pushed args in forward order, perform stack alignment
3053 after pushing the last arg. */
3054 if (!PUSH_ARGS_REVERSED && argblock == 0)
3055 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3056 - unadjusted_args_size));
3058 /* If register arguments require space on the stack and stack space
3059 was not preallocated, allocate stack space here for arguments
3060 passed in registers. */
3061 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3062 if (!ACCUMULATE_OUTGOING_ARGS
3063 && must_preallocate == 0 && reg_parm_stack_space > 0)
3064 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3065 #endif
3067 /* Pass the function the address in which to return a
3068 structure value. */
3069 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3071 structure_value_addr
3072 = convert_memory_address (Pmode, structure_value_addr);
3073 emit_move_insn (struct_value,
3074 force_reg (Pmode,
3075 force_operand (structure_value_addr,
3076 NULL_RTX)));
3078 if (GET_CODE (struct_value) == REG)
3079 use_reg (&call_fusage, struct_value);
3082 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3083 reg_parm_seen, pass == 0);
3085 load_register_parameters (args, num_actuals, &call_fusage, flags,
3086 pass == 0, &sibcall_failure);
3088 /* Perform postincrements before actually calling the function. */
3089 emit_queue ();
3091 /* Save a pointer to the last insn before the call, so that we can
3092 later safely search backwards to find the CALL_INSN. */
3093 before_call = get_last_insn ();
3095 /* Set up next argument register. For sibling calls on machines
3096 with register windows this should be the incoming register. */
3097 #ifdef FUNCTION_INCOMING_ARG
3098 if (pass == 0)
3099 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3100 void_type_node, 1);
3101 else
3102 #endif
3103 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3104 void_type_node, 1);
3106 /* All arguments and registers used for the call must be set up by
3107 now! */
3109 /* Stack must be properly aligned now. */
3110 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3111 abort ();
3113 /* Generate the actual call instruction. */
3114 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3115 adjusted_args_size.constant, struct_value_size,
3116 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3117 flags, & args_so_far);
3119 /* If call is cse'able, make appropriate pair of reg-notes around it.
3120 Test valreg so we don't crash; may safely ignore `const'
3121 if return type is void. Disable for PARALLEL return values, because
3122 we have no way to move such values into a pseudo register. */
3123 if (pass && (flags & ECF_LIBCALL_BLOCK))
3125 rtx insns;
3126 rtx insn;
3127 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
3129 insns = get_insns ();
3131 /* Expansion of block moves possibly introduced a loop that may
3132 not appear inside a libcall block. */
3133 for (insn = insns; insn; insn = NEXT_INSN (insn))
3134 if (GET_CODE (insn) == JUMP_INSN)
3135 failed = true;
3137 if (failed)
3139 end_sequence ();
3140 emit_insn (insns);
3142 else
3144 rtx note = 0;
3145 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3147 /* Mark the return value as a pointer if needed. */
3148 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3149 mark_reg_pointer (temp,
3150 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3152 /* Construct an "equal form" for the value which mentions all the
3153 arguments in order as well as the function name. */
3154 for (i = 0; i < num_actuals; i++)
3155 note = gen_rtx_EXPR_LIST (VOIDmode,
3156 args[i].initial_value, note);
3157 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3159 end_sequence ();
3161 if (flags & ECF_PURE)
3162 note = gen_rtx_EXPR_LIST (VOIDmode,
3163 gen_rtx_USE (VOIDmode,
3164 gen_rtx_MEM (BLKmode,
3165 gen_rtx_SCRATCH (VOIDmode))),
3166 note);
3168 emit_libcall_block (insns, temp, valreg, note);
3170 valreg = temp;
3173 else if (pass && (flags & ECF_MALLOC))
3175 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3176 rtx last, insns;
3178 /* The return value from a malloc-like function is a pointer. */
3179 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3180 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3182 emit_move_insn (temp, valreg);
3184 /* The return value from a malloc-like function cannot alias
3185 anything else. */
3186 last = get_last_insn ();
3187 REG_NOTES (last) =
3188 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3190 /* Write out the sequence. */
3191 insns = get_insns ();
3192 end_sequence ();
3193 emit_insn (insns);
3194 valreg = temp;
3197 /* For calls to `setjmp', etc., inform flow.c it should complain
3198 if nonvolatile values are live. For functions that cannot return,
3199 inform flow that control does not fall through. */
3201 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3203 /* The barrier must be emitted
3204 immediately after the CALL_INSN. Some ports emit more
3205 than just a CALL_INSN above, so we must search for it here. */
3207 rtx last = get_last_insn ();
3208 while (GET_CODE (last) != CALL_INSN)
3210 last = PREV_INSN (last);
3211 /* There was no CALL_INSN? */
3212 if (last == before_call)
3213 abort ();
3216 emit_barrier_after (last);
3218 /* Stack adjustments after a noreturn call are dead code.
3219 However when NO_DEFER_POP is in effect, we must preserve
3220 stack_pointer_delta. */
3221 if (inhibit_defer_pop == 0)
3223 stack_pointer_delta = old_stack_allocated;
3224 pending_stack_adjust = 0;
3228 if (flags & ECF_LONGJMP)
3229 current_function_calls_longjmp = 1;
3231 /* If value type not void, return an rtx for the value. */
3233 /* If there are cleanups to be called, don't use a hard reg as target.
3234 We need to double check this and see if it matters anymore. */
3235 if (any_pending_cleanups ())
3237 if (target && REG_P (target)
3238 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3239 target = 0;
3240 sibcall_failure = 1;
3243 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3244 || ignore)
3245 target = const0_rtx;
3246 else if (structure_value_addr)
3248 if (target == 0 || GET_CODE (target) != MEM)
3250 target
3251 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3252 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3253 structure_value_addr));
3254 set_mem_attributes (target, exp, 1);
3257 else if (pcc_struct_value)
3259 /* This is the special C++ case where we need to
3260 know what the true target was. We take care to
3261 never use this value more than once in one expression. */
3262 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3263 copy_to_reg (valreg));
3264 set_mem_attributes (target, exp, 1);
3266 /* Handle calls that return values in multiple non-contiguous locations.
3267 The Irix 6 ABI has examples of this. */
3268 else if (GET_CODE (valreg) == PARALLEL)
3270 /* The second condition is added because "target" is freed at
3271 the end of pass 0 for -O2 when a call is made to
3272 expand_end_target_temps (). Its "in_use" flag has been set
3273 to false, so allocate a new temp. */
3274 if (target == 0 || (pass == 1 && target == temp_target))
3276 /* This will only be assigned once, so it can be readonly. */
3277 tree nt = build_qualified_type (TREE_TYPE (exp),
3278 (TYPE_QUALS (TREE_TYPE (exp))
3279 | TYPE_QUAL_CONST));
3281 target = assign_temp (nt, 0, 1, 1);
3282 temp_target = target;
3283 preserve_temp_slots (target);
3286 if (! rtx_equal_p (target, valreg))
3287 emit_group_store (target, valreg, TREE_TYPE (exp),
3288 int_size_in_bytes (TREE_TYPE (exp)));
3290 /* We can not support sibling calls for this case. */
3291 sibcall_failure = 1;
3293 else if (target
3294 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3295 && GET_MODE (target) == GET_MODE (valreg))
3297 /* TARGET and VALREG cannot be equal at this point because the
3298 latter would not have REG_FUNCTION_VALUE_P true, while the
3299 former would if it were referring to the same register.
3301 If they refer to the same register, this move will be a no-op,
3302 except when function inlining is being done. */
3303 emit_move_insn (target, valreg);
3305 /* If we are setting a MEM, this code must be executed. Since it is
3306 emitted after the call insn, sibcall optimization cannot be
3307 performed in that case. */
3308 if (GET_CODE (target) == MEM)
3309 sibcall_failure = 1;
3311 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3313 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3315 /* We can not support sibling calls for this case. */
3316 sibcall_failure = 1;
3318 else
3320 if (shift_returned_value (TREE_TYPE (exp), &valreg))
3321 sibcall_failure = 1;
3323 target = copy_to_reg (valreg);
3326 if (targetm.calls.promote_function_return(funtype))
3328 /* If we promoted this return value, make the proper SUBREG. TARGET
3329 might be const0_rtx here, so be careful. */
3330 if (GET_CODE (target) == REG
3331 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3332 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3334 tree type = TREE_TYPE (exp);
3335 int unsignedp = TREE_UNSIGNED (type);
3336 int offset = 0;
3338 /* If we don't promote as expected, something is wrong. */
3339 if (GET_MODE (target)
3340 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3341 abort ();
3343 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3344 && GET_MODE_SIZE (GET_MODE (target))
3345 > GET_MODE_SIZE (TYPE_MODE (type)))
3347 offset = GET_MODE_SIZE (GET_MODE (target))
3348 - GET_MODE_SIZE (TYPE_MODE (type));
3349 if (! BYTES_BIG_ENDIAN)
3350 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3351 else if (! WORDS_BIG_ENDIAN)
3352 offset %= UNITS_PER_WORD;
3354 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3355 SUBREG_PROMOTED_VAR_P (target) = 1;
3356 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3360 /* If size of args is variable or this was a constructor call for a stack
3361 argument, restore saved stack-pointer value. */
3363 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3365 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3366 stack_pointer_delta = old_stack_pointer_delta;
3367 pending_stack_adjust = old_pending_adj;
3368 stack_arg_under_construction = old_stack_arg_under_construction;
3369 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3370 stack_usage_map = initial_stack_usage_map;
3371 sibcall_failure = 1;
3373 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3375 #ifdef REG_PARM_STACK_SPACE
3376 if (save_area)
3377 restore_fixed_argument_area (save_area, argblock,
3378 high_to_save, low_to_save);
3379 #endif
3381 /* If we saved any argument areas, restore them. */
3382 for (i = 0; i < num_actuals; i++)
3383 if (args[i].save_area)
3385 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3386 rtx stack_area
3387 = gen_rtx_MEM (save_mode,
3388 memory_address (save_mode,
3389 XEXP (args[i].stack_slot, 0)));
3391 if (save_mode != BLKmode)
3392 emit_move_insn (stack_area, args[i].save_area);
3393 else
3394 emit_block_move (stack_area, args[i].save_area,
3395 GEN_INT (args[i].locate.size.constant),
3396 BLOCK_OP_CALL_PARM);
3399 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3400 stack_usage_map = initial_stack_usage_map;
3403 /* If this was alloca, record the new stack level for nonlocal gotos.
3404 Check for the handler slots since we might not have a save area
3405 for non-local gotos. */
3407 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3408 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3410 /* Free up storage we no longer need. */
3411 for (i = 0; i < num_actuals; ++i)
3412 if (args[i].aligned_regs)
3413 free (args[i].aligned_regs);
3415 if (pass == 0)
3417 /* Undo the fake expand_start_target_temps we did earlier. If
3418 there had been any cleanups created, we've already set
3419 sibcall_failure. */
3420 expand_end_target_temps ();
3423 /* If this function is returning into a memory location marked as
3424 readonly, it means it is initializing that location. We normally treat
3425 functions as not clobbering such locations, so we need to specify that
3426 this one does. We do this by adding the appropriate CLOBBER to the
3427 CALL_INSN function usage list. This cannot be done by emitting a
3428 standalone CLOBBER after the call because the latter would be ignored
3429 by at least the delay slot scheduling pass. We do this now instead of
3430 adding to call_fusage before the call to emit_call_1 because TARGET
3431 may be modified in the meantime. */
3432 if (structure_value_addr != 0 && target != 0
3433 && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
3434 add_function_usage_to
3435 (last_call_insn (),
3436 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3437 NULL_RTX));
3439 insns = get_insns ();
3440 end_sequence ();
3442 if (pass == 0)
3444 tail_call_insns = insns;
3446 /* Restore the pending stack adjustment now that we have
3447 finished generating the sibling call sequence. */
3449 pending_stack_adjust = save_pending_stack_adjust;
3450 stack_pointer_delta = save_stack_pointer_delta;
3452 /* Prepare arg structure for next iteration. */
3453 for (i = 0; i < num_actuals; i++)
3455 args[i].value = 0;
3456 args[i].aligned_regs = 0;
3457 args[i].stack = 0;
3460 sbitmap_free (stored_args_map);
3462 else
3464 normal_call_insns = insns;
3466 /* Verify that we've deallocated all the stack we used. */
3467 if (! (flags & (ECF_NORETURN | ECF_LONGJMP))
3468 && old_stack_allocated != stack_pointer_delta
3469 - pending_stack_adjust)
3470 abort ();
3473 /* If something prevents making this a sibling call,
3474 zero out the sequence. */
3475 if (sibcall_failure)
3476 tail_call_insns = NULL_RTX;
3479 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3480 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3481 can happen if the arguments to this function call an inline
3482 function whose expansion contains another CALL_PLACEHOLDER.
3484 If there are any CALL_PLACEHOLDERs in any of these sequences,
3485 replace them with their normal call. */
3487 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3488 if (GET_CODE (insn) == CALL_INSN
3489 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3490 replace_call_placeholder (insn, sibcall_use_normal);
3492 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3493 if (GET_CODE (insn) == CALL_INSN
3494 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3495 replace_call_placeholder (insn, sibcall_use_normal);
3497 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3498 if (GET_CODE (insn) == CALL_INSN
3499 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3500 replace_call_placeholder (insn, sibcall_use_normal);
3502 /* If this was a potential tail recursion site, then emit a
3503 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3504 One of them will be selected later. */
3505 if (tail_recursion_insns || tail_call_insns)
3507 /* The tail recursion label must be kept around. We could expose
3508 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3509 and makes determining true tail recursion sites difficult.
3511 So we set LABEL_PRESERVE_P here, then clear it when we select
3512 one of the call sequences after rtl generation is complete. */
3513 if (tail_recursion_insns)
3514 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3515 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3516 tail_call_insns,
3517 tail_recursion_insns,
3518 tail_recursion_label));
3520 else
3521 emit_insn (normal_call_insns);
3523 currently_expanding_call--;
3525 /* If this function returns with the stack pointer depressed, ensure
3526 this block saves and restores the stack pointer, show it was
3527 changed, and adjust for any outgoing arg space. */
3528 if (flags & ECF_SP_DEPRESSED)
3530 clear_pending_stack_adjust ();
3531 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3532 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3533 save_stack_pointer ();
3536 return target;
3539 /* Traverse an argument list in VALUES and expand all complex
3540 arguments into their components. */
3541 tree
3542 split_complex_values (tree values)
3544 tree p;
3546 values = copy_list (values);
3548 for (p = values; p; p = TREE_CHAIN (p))
3550 tree complex_value = TREE_VALUE (p);
3551 tree complex_type;
3553 complex_type = TREE_TYPE (complex_value);
3554 if (!complex_type)
3555 continue;
3557 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3559 tree subtype;
3560 tree real, imag, next;
3562 subtype = TREE_TYPE (complex_type);
3563 complex_value = save_expr (complex_value);
3564 real = build1 (REALPART_EXPR, subtype, complex_value);
3565 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3567 TREE_VALUE (p) = real;
3568 next = TREE_CHAIN (p);
3569 imag = build_tree_list (NULL_TREE, imag);
3570 TREE_CHAIN (p) = imag;
3571 TREE_CHAIN (imag) = next;
3573 /* Skip the newly created node. */
3574 p = TREE_CHAIN (p);
3578 return values;
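/* A worked example of the transformation above (hypothetical argument
   list): (a, z, b) with z of type _Complex double becomes
   (a, REALPART_EXPR <z'>, IMAGPART_EXPR <z'>, b), where z' is the
   save_expr'd complex value, so each component is passed as an
   ordinary scalar argument. */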
3581 /* Traverse a list of TYPES and expand all complex types into their
3582 components. */
3583 tree
3584 split_complex_types (tree types)
3586 tree p;
3588 types = copy_list (types);
3590 for (p = types; p; p = TREE_CHAIN (p))
3592 tree complex_type = TREE_VALUE (p);
3594 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3596 tree next, imag;
3598 /* Rewrite complex type with component type. */
3599 TREE_VALUE (p) = TREE_TYPE (complex_type);
3600 next = TREE_CHAIN (p);
3602 /* Add another component type for the imaginary part. */
3603 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3604 TREE_CHAIN (p) = imag;
3605 TREE_CHAIN (imag) = next;
3607 /* Skip the newly created node. */
3608 p = TREE_CHAIN (p);
3612 return types;
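/* Likewise for the type list (illustrative): (int, _Complex double,
   char) becomes (int, double, double, char); the first double
   overwrites the complex node in place and the second is spliced in
   immediately after it. */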
3615 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3616 The RETVAL parameter specifies whether the return value needs to be saved;
3617 the other parameters are documented in the emit_library_call function below. */
3619 static rtx
3620 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3621 enum libcall_type fn_type,
3622 enum machine_mode outmode, int nargs, va_list p)
3624 /* Total size in bytes of all the stack-parms scanned so far. */
3625 struct args_size args_size;
3626 /* Size of arguments before any adjustments (such as rounding). */
3627 struct args_size original_args_size;
3628 int argnum;
3629 rtx fun;
3630 int inc;
3631 int count;
3632 rtx argblock = 0;
3633 CUMULATIVE_ARGS args_so_far;
3634 struct arg
3636 rtx value;
3637 enum machine_mode mode;
3638 rtx reg;
3639 int partial;
3640 struct locate_and_pad_arg_data locate;
3641 rtx save_area;
3643 struct arg *argvec;
3644 int old_inhibit_defer_pop = inhibit_defer_pop;
3645 rtx call_fusage = 0;
3646 rtx mem_value = 0;
3647 rtx valreg;
3648 int pcc_struct_value = 0;
3649 int struct_value_size = 0;
3650 int flags;
3651 int reg_parm_stack_space = 0;
3652 int needed;
3653 rtx before_call;
3654 tree tfom; /* type_for_mode (outmode, 0) */
3656 #ifdef REG_PARM_STACK_SPACE
3657 /* Define the boundary of the register parm stack space that needs to be
3658 saved, if any. */
3659 int low_to_save, high_to_save;
3660 rtx save_area = 0; /* Place that it is saved. */
3661 #endif
3663 /* Size of the stack reserved for parameter registers. */
3664 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3665 char *initial_stack_usage_map = stack_usage_map;
3667 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3669 #ifdef REG_PARM_STACK_SPACE
3670 #ifdef MAYBE_REG_PARM_STACK_SPACE
3671 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3672 #else
3673 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3674 #endif
3675 #endif
3677 /* By default, library functions cannot throw. */
3678 flags = ECF_NOTHROW;
3680 switch (fn_type)
3682 case LCT_NORMAL:
3683 break;
3684 case LCT_CONST:
3685 flags |= ECF_CONST;
3686 break;
3687 case LCT_PURE:
3688 flags |= ECF_PURE;
3689 break;
3690 case LCT_CONST_MAKE_BLOCK:
3691 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3692 break;
3693 case LCT_PURE_MAKE_BLOCK:
3694 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3695 break;
3696 case LCT_NORETURN:
3697 flags |= ECF_NORETURN;
3698 break;
3699 case LCT_THROW:
3700 flags = ECF_NORETURN;
3701 break;
3702 case LCT_ALWAYS_RETURN:
3703 flags = ECF_ALWAYS_RETURN;
3704 break;
3705 case LCT_RETURNS_TWICE:
3706 flags = ECF_RETURNS_TWICE;
3707 break;
3709 fun = orgfun;
3711 /* Ensure current function's preferred stack boundary is at least
3712 what we need. */
3713 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3714 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3716 /* If this kind of value comes back in memory,
3717 decide where in memory it should come back. */
3718 if (outmode != VOIDmode)
3720 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3721 if (aggregate_value_p (tfom, 0))
3723 #ifdef PCC_STATIC_STRUCT_RETURN
3724 rtx pointer_reg
3725 = hard_function_value (build_pointer_type (tfom), 0, 0);
3726 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3727 pcc_struct_value = 1;
3728 if (value == 0)
3729 value = gen_reg_rtx (outmode);
3730 #else /* not PCC_STATIC_STRUCT_RETURN */
3731 struct_value_size = GET_MODE_SIZE (outmode);
3732 if (value != 0 && GET_CODE (value) == MEM)
3733 mem_value = value;
3734 else
3735 mem_value = assign_temp (tfom, 0, 1, 1);
3736 #endif
3737 /* This call returns a big structure. */
3738 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3741 else
3742 tfom = void_type_node;
3744 /* ??? Unfinished: must pass the memory address as an argument. */
3746 /* Copy all the libcall-arguments out of the varargs data
3747 and into a vector ARGVEC.
3749 Compute how to pass each argument. We only support a very small subset
3750 of the full argument passing conventions to limit complexity here since
3751 library functions shouldn't have many args. */
3753 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3754 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3756 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3757 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3758 #else
3759 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3760 #endif
3762 args_size.constant = 0;
3763 args_size.var = 0;
3765 count = 0;
3767 /* Now we are about to start emitting insns that can be deleted
3768 if a libcall is deleted. */
3769 if (flags & ECF_LIBCALL_BLOCK)
3770 start_sequence ();
3772 push_temp_slots ();
3774 /* If there's a structure value address to be passed,
3775 either pass it in the special place, or pass it as an extra argument. */
3776 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3778 rtx addr = XEXP (mem_value, 0);
3779 nargs++;
3781 /* Make sure it is a reasonable operand for a move or push insn. */
3782 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3783 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3784 addr = force_operand (addr, NULL_RTX);
3786 argvec[count].value = addr;
3787 argvec[count].mode = Pmode;
3788 argvec[count].partial = 0;
3790 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3791 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3792 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3793 abort ();
3794 #endif
3796 locate_and_pad_parm (Pmode, NULL_TREE,
3797 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3798 1,
3799 #else
3800 argvec[count].reg != 0,
3801 #endif
3802 0, NULL_TREE, &args_size, &argvec[count].locate);
3804 if (argvec[count].reg == 0 || argvec[count].partial != 0
3805 || reg_parm_stack_space > 0)
3806 args_size.constant += argvec[count].locate.size.constant;
3808 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3810 count++;
3813 for (; count < nargs; count++)
3815 rtx val = va_arg (p, rtx);
3816 enum machine_mode mode = va_arg (p, enum machine_mode);
3818 /* We cannot convert the arg value to the mode the library wants here;
3819 we must do it earlier, where we know the signedness of the arg. */
3820 if (mode == BLKmode
3821 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3822 abort ();
3824 /* There's no need to call protect_from_queue, because
3825 either emit_move_insn or emit_push_insn will do that. */
3827 /* Make sure it is a reasonable operand for a move or push insn. */
3828 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3829 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3830 val = force_operand (val, NULL_RTX);
3832 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3833 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3835 rtx slot;
3836 int must_copy = 1
3837 #ifdef FUNCTION_ARG_CALLEE_COPIES
3838 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3839 NULL_TREE, 1)
3840 #endif
3841 ;
3843 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3844 functions, so we have to pretend this isn't such a function. */
3845 if (flags & ECF_LIBCALL_BLOCK)
3847 rtx insns = get_insns ();
3848 end_sequence ();
3849 emit_insn (insns);
3851 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3853 /* If this was a CONST function, it is now PURE since
3854 it now reads memory. */
3855 if (flags & ECF_CONST)
3857 flags &= ~ECF_CONST;
3858 flags |= ECF_PURE;
3861 if (GET_MODE (val) == MEM && ! must_copy)
3862 slot = val;
3863 else if (must_copy)
3865 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3866 0, 1, 1);
3867 emit_move_insn (slot, val);
3869 else
3871 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3873 slot
3874 = gen_rtx_MEM (mode,
3875 expand_expr (build1 (ADDR_EXPR,
3876 build_pointer_type (type),
3877 make_tree (type, val)),
3878 NULL_RTX, VOIDmode, 0));
3881 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3882 gen_rtx_USE (VOIDmode, slot),
3883 call_fusage);
3884 if (must_copy)
3885 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3886 gen_rtx_CLOBBER (VOIDmode,
3887 slot),
3888 call_fusage);
3890 mode = Pmode;
3891 val = force_operand (XEXP (slot, 0), NULL_RTX);
3893 #endif
3895 argvec[count].value = val;
3896 argvec[count].mode = mode;
3898 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3900 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3901 argvec[count].partial
3902 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3903 #else
3904 argvec[count].partial = 0;
3905 #endif
3907 locate_and_pad_parm (mode, NULL_TREE,
3908 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3909 1,
3910 #else
3911 argvec[count].reg != 0,
3912 #endif
3913 argvec[count].partial,
3914 NULL_TREE, &args_size, &argvec[count].locate);
3916 if (argvec[count].locate.size.var)
3917 abort ();
3919 if (argvec[count].reg == 0 || argvec[count].partial != 0
3920 || reg_parm_stack_space > 0)
3921 args_size.constant += argvec[count].locate.size.constant;
3923 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3926 #ifdef FINAL_REG_PARM_STACK_SPACE
3927 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3928 args_size.var);
3929 #endif
3930 /* If this machine requires an external definition for library
3931 functions, write one out. */
3932 assemble_external_libcall (fun);
3934 original_args_size = args_size;
3935 args_size.constant = (((args_size.constant
3936 + stack_pointer_delta
3937 + STACK_BYTES - 1)
3938 / STACK_BYTES
3939 * STACK_BYTES)
3940 - stack_pointer_delta);
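/* Worked example of the rounding above (made-up numbers): with
   STACK_BYTES == 16, stack_pointer_delta == 4 and 20 bytes of
   arguments, (20 + 4 + 15) / 16 * 16 == 32, and subtracting the
   delta of 4 leaves args_size.constant == 28, keeping the overall
   adjustment 16-byte aligned. */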
3942 args_size.constant = MAX (args_size.constant,
3943 reg_parm_stack_space);
3945 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3946 args_size.constant -= reg_parm_stack_space;
3947 #endif
3949 if (args_size.constant > current_function_outgoing_args_size)
3950 current_function_outgoing_args_size = args_size.constant;
3952 if (ACCUMULATE_OUTGOING_ARGS)
3954 /* Since the stack pointer will never be pushed, it is possible for
3955 the evaluation of a parm to clobber something we have already
3956 written to the stack. Since most function calls on RISC machines
3957 do not use the stack, this is uncommon, but must work correctly.
3959 Therefore, we save any area of the stack that was already written
3960 and that we are using. Here we set up to do this by making a new
3961 stack usage map from the old one.
3963 Another approach might be to try to reorder the argument
3964 evaluations to avoid this conflicting stack usage. */
3966 needed = args_size.constant;
3968 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3969 /* Since we will be writing into the entire argument area, the
3970 map must be allocated for its entire size, not just the part that
3971 is the responsibility of the caller. */
3972 needed += reg_parm_stack_space;
3973 #endif
3975 #ifdef ARGS_GROW_DOWNWARD
3976 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3977 needed + 1);
3978 #else
3979 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3980 needed);
3981 #endif
3982 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3984 if (initial_highest_arg_in_use)
3985 memcpy (stack_usage_map, initial_stack_usage_map,
3986 initial_highest_arg_in_use);
3988 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3989 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3990 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3991 needed = 0;
3993 /* We must be careful to use virtual regs before they're instantiated,
3994 and real regs afterwards. Loop optimization, for example, can create
3995 new libcalls after we've instantiated the virtual regs, and if we
3996 use virtuals anyway, they won't match the rtl patterns. */
3998 if (virtuals_instantiated)
3999 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
4000 else
4001 argblock = virtual_outgoing_args_rtx;
4003 else
4005 if (!PUSH_ARGS)
4006 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
4009 /* If we push args individually in reverse order, perform stack alignment
4010 before the first push (the last arg). */
4011 if (argblock == 0 && PUSH_ARGS_REVERSED)
4012 anti_adjust_stack (GEN_INT (args_size.constant
4013 - original_args_size.constant));
4015 if (PUSH_ARGS_REVERSED)
4017 inc = -1;
4018 argnum = nargs - 1;
4020 else
4022 inc = 1;
4023 argnum = 0;
4026 #ifdef REG_PARM_STACK_SPACE
4027 if (ACCUMULATE_OUTGOING_ARGS)
4029 /* The argument list is the property of the called routine, which
4030 may clobber it. If the fixed area has been used for previous
4031 parameters, we must save and restore it. */
4032 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4033 &low_to_save, &high_to_save);
4035 #endif
4037 /* Push the args that need to be pushed. */
4039 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4040 are to be pushed. */
4041 for (count = 0; count < nargs; count++, argnum += inc)
4043 enum machine_mode mode = argvec[argnum].mode;
4044 rtx val = argvec[argnum].value;
4045 rtx reg = argvec[argnum].reg;
4046 int partial = argvec[argnum].partial;
4047 int lower_bound = 0, upper_bound = 0, i;
4049 if (! (reg != 0 && partial == 0))
4051 if (ACCUMULATE_OUTGOING_ARGS)
4053 /* If this is being stored into a pre-allocated, fixed-size,
4054 stack area, save any previous data at that location. */
4056 #ifdef ARGS_GROW_DOWNWARD
4057 /* stack_slot is negative, but we want to index stack_usage_map
4058 with positive values. */
4059 upper_bound = -argvec[argnum].locate.offset.constant + 1;
4060 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4061 #else
4062 lower_bound = argvec[argnum].locate.offset.constant;
4063 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4064 #endif
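/* Example of the bounds above (hypothetical figures): with
   ARGS_GROW_DOWNWARD, an argument at offset -8 of size 4 yields
   upper_bound == 9 and lower_bound == 5, so the bytes this store
   covers are indexed positively into stack_usage_map. */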
4066 i = lower_bound;
4067 /* Don't worry about things in the fixed argument area;
4068 it has already been saved. */
4069 if (i < reg_parm_stack_space)
4070 i = reg_parm_stack_space;
4071 while (i < upper_bound && stack_usage_map[i] == 0)
4072 i++;
4074 if (i < upper_bound)
4076 /* We need to make a save area. */
4077 unsigned int size
4078 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4079 enum machine_mode save_mode
4080 = mode_for_size (size, MODE_INT, 1);
4081 rtx adr
4082 = plus_constant (argblock,
4083 argvec[argnum].locate.offset.constant);
4084 rtx stack_area
4085 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4087 if (save_mode == BLKmode)
4089 argvec[argnum].save_area
4090 = assign_stack_temp (BLKmode,
4091 argvec[argnum].locate.size.constant,
4092 0);
4094 emit_block_move (validize_mem (argvec[argnum].save_area),
4095 stack_area,
4096 GEN_INT (argvec[argnum].locate.size.constant),
4097 BLOCK_OP_CALL_PARM);
4099 else
4101 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4103 emit_move_insn (argvec[argnum].save_area, stack_area);
4108 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4109 partial, reg, 0, argblock,
4110 GEN_INT (argvec[argnum].locate.offset.constant),
4111 reg_parm_stack_space,
4112 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4114 /* Now mark the segment we just used. */
4115 if (ACCUMULATE_OUTGOING_ARGS)
4116 for (i = lower_bound; i < upper_bound; i++)
4117 stack_usage_map[i] = 1;
4119 NO_DEFER_POP;
4123 /* If we pushed args in forward order, perform stack alignment
4124 after pushing the last arg. */
4125 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4126 anti_adjust_stack (GEN_INT (args_size.constant
4127 - original_args_size.constant));
4129 if (PUSH_ARGS_REVERSED)
4130 argnum = nargs - 1;
4131 else
4132 argnum = 0;
4134 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4136 /* Now load any reg parms into their regs. */
4138 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4139 are to be pushed. */
4140 for (count = 0; count < nargs; count++, argnum += inc)
4142 rtx val = argvec[argnum].value;
4143 rtx reg = argvec[argnum].reg;
4144 int partial = argvec[argnum].partial;
4146 /* Handle calls that pass values in multiple non-contiguous
4147 locations. The PA64 has examples of this for library calls. */
4148 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4149 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (GET_MODE (val)));
4150 else if (reg != 0 && partial == 0)
4151 emit_move_insn (reg, val);
4153 NO_DEFER_POP;
4156 /* Any regs containing parms remain in use through the call. */
4157 for (count = 0; count < nargs; count++)
4159 rtx reg = argvec[count].reg;
4160 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4161 use_group_regs (&call_fusage, reg);
4162 else if (reg != 0)
4163 use_reg (&call_fusage, reg);
4166 /* Pass the function the address in which to return a structure value. */
4167 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4169 emit_move_insn (struct_value,
4170 force_reg (Pmode,
4171 force_operand (XEXP (mem_value, 0),
4172 NULL_RTX)));
4173 if (GET_CODE (struct_value) == REG)
4174 use_reg (&call_fusage, struct_value);
4177 /* Don't allow popping to be deferred, since then
4178 cse'ing of library calls could delete a call and leave the pop. */
4179 NO_DEFER_POP;
4180 valreg = (mem_value == 0 && outmode != VOIDmode
4181 ? hard_libcall_value (outmode) : NULL_RTX);
4183 /* Stack must be properly aligned now. */
4184 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4185 abort ();
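/* The check above masks the low-order bits of the delta; e.g. with a
   128-bit PREFERRED_STACK_BOUNDARY the mask is 16 - 1 == 15, so any
   delta that is not a multiple of 16 bytes aborts (example values
   only). */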
4187 before_call = get_last_insn ();
4189 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4190 will set inhibit_defer_pop to that value. */
4191 /* The return type is needed to decide how many bytes the function pops.
4192 Signedness plays no role in that, so for simplicity, we pretend it's
4193 always signed. We also assume that the list of arguments passed has
4194 no impact, so we pretend it is unknown. */
4196 emit_call_1 (fun,
4197 get_identifier (XSTR (orgfun, 0)),
4198 build_function_type (tfom, NULL_TREE),
4199 original_args_size.constant, args_size.constant,
4200 struct_value_size,
4201 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4202 valreg,
4203 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4205 /* For calls to `setjmp', etc., inform flow.c it should complain
4206 if nonvolatile values are live. For functions that cannot return,
4207 inform flow that control does not fall through. */
4209 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4211 /* The barrier note must be emitted
4212 immediately after the CALL_INSN. Some ports emit more than
4213 just a CALL_INSN above, so we must search for it here. */
4215 rtx last = get_last_insn ();
4216 while (GET_CODE (last) != CALL_INSN)
4218 last = PREV_INSN (last);
4219 /* There was no CALL_INSN? */
4220 if (last == before_call)
4221 abort ();
4224 emit_barrier_after (last);
4227 /* Now restore inhibit_defer_pop to its actual original value. */
4228 OK_DEFER_POP;
4230 /* If call is cse'able, make appropriate pair of reg-notes around it.
4231 Test valreg so we don't crash; may safely ignore `const'
4232 if return type is void. Disable for PARALLEL return values, because
4233 we have no way to move such values into a pseudo register. */
4234 if (flags & ECF_LIBCALL_BLOCK)
4236 rtx insns;
4238 if (valreg == 0)
4240 insns = get_insns ();
4241 end_sequence ();
4242 emit_insn (insns);
4244 else
4246 rtx note = 0;
4247 rtx temp;
4248 int i;
4250 if (GET_CODE (valreg) == PARALLEL)
4252 temp = gen_reg_rtx (outmode);
4253 emit_group_store (temp, valreg, NULL_TREE,
4254 GET_MODE_SIZE (outmode));
4255 valreg = temp;
4258 temp = gen_reg_rtx (GET_MODE (valreg));
4260 /* Construct an "equal form" for the value which mentions all the
4261 arguments in order as well as the function name. */
4262 for (i = 0; i < nargs; i++)
4263 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4264 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4266 insns = get_insns ();
4267 end_sequence ();
4269 if (flags & ECF_PURE)
4270 note = gen_rtx_EXPR_LIST (VOIDmode,
4271 gen_rtx_USE (VOIDmode,
4272 gen_rtx_MEM (BLKmode,
4273 gen_rtx_SCRATCH (VOIDmode))),
4274 note);
4276 emit_libcall_block (insns, temp, valreg, note);
4278 valreg = temp;
4281 pop_temp_slots ();
4283 /* Copy the value to the right place. */
4284 if (outmode != VOIDmode && retval)
4286 if (mem_value)
4288 if (value == 0)
4289 value = mem_value;
4290 if (value != mem_value)
4291 emit_move_insn (value, mem_value);
4293 else if (GET_CODE (valreg) == PARALLEL)
4295 if (value == 0)
4296 value = gen_reg_rtx (outmode);
4297 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4299 else if (value != 0)
4300 emit_move_insn (value, valreg);
4301 else
4302 value = valreg;
4305 if (ACCUMULATE_OUTGOING_ARGS)
4307 #ifdef REG_PARM_STACK_SPACE
4308 if (save_area)
4309 restore_fixed_argument_area (save_area, argblock,
4310 high_to_save, low_to_save);
4311 #endif
4313 /* If we saved any argument areas, restore them. */
4314 for (count = 0; count < nargs; count++)
4315 if (argvec[count].save_area)
4317 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4318 rtx adr = plus_constant (argblock,
4319 argvec[count].locate.offset.constant);
4320 rtx stack_area = gen_rtx_MEM (save_mode,
4321 memory_address (save_mode, adr));
4323 if (save_mode == BLKmode)
4324 emit_block_move (stack_area,
4325 validize_mem (argvec[count].save_area),
4326 GEN_INT (argvec[count].locate.size.constant),
4327 BLOCK_OP_CALL_PARM);
4328 else
4329 emit_move_insn (stack_area, argvec[count].save_area);
4332 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4333 stack_usage_map = initial_stack_usage_map;
4336 return value;
4340 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4341 (emitting the queue unless NO_QUEUE is nonzero),
4342 for a value of mode OUTMODE,
4343 with NARGS different arguments, passed as alternating rtx values
4344 and machine_modes to convert them to.
4345 The rtx values should have been passed through protect_from_queue already.
4347 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4348 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4349 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4350 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4351 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4352 or other LCT_ value for other types of library calls. */
4354 void
4355 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4356 enum machine_mode outmode, int nargs, ...)
4358 va_list p;
4360 va_start (p, nargs);
4361 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4362 va_end (p);
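/* A hypothetical usage sketch (the helper name and operands are
   invented for illustration): calling a two-operand SImode helper
   purely for its side effects might look like

     emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__helper"),
                        LCT_NORMAL, VOIDmode, 2,
                        op0, SImode, op1, SImode);

   with OUTMODE of VOIDmode because no return value is expected. */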
4365 /* Like emit_library_call except that an extra argument, VALUE,
4366 comes second and says where to store the result.
4367 (If VALUE is zero, this function chooses a convenient way
4368 to return the value.
4370 This function returns an rtx for where the value is to be found.
4371 If VALUE is nonzero, VALUE is returned. */
4373 rtx
4374 emit_library_call_value (rtx orgfun, rtx value,
4375 enum libcall_type fn_type,
4376 enum machine_mode outmode, int nargs, ...)
4378 rtx result;
4379 va_list p;
4381 va_start (p, nargs);
4382 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4383 nargs, p);
4384 va_end (p);
4386 return result;
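/* Correspondingly, a value-returning sketch (LIBFUNC, OP0 and OP1 are
   again invented):

     result = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                       SImode, 2,
                                       op0, SImode, op1, SImode);

   Passing NULL_RTX for VALUE lets the function choose where the
   SImode result lives, per the comment above. */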
4389 /* Store a single argument for a function call
4390 into the register or memory area where it must be passed.
4391 *ARG describes the argument value and where to pass it.
4393 ARGBLOCK is the address of the stack-block for all the arguments,
4394 or 0 on a machine where arguments are pushed individually.
4396 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4397 so we must be careful about how the stack is used.
4399 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4400 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4401 that we need not worry about saving and restoring the stack.
4403 FNDECL is the declaration of the function we are calling.
4405 Return nonzero if this arg should cause sibcall failure,
4406 zero otherwise. */
4408 static int
4409 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4410 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4412 tree pval = arg->tree_value;
4413 rtx reg = 0;
4414 int partial = 0;
4415 int used = 0;
4416 int i, lower_bound = 0, upper_bound = 0;
4417 int sibcall_failure = 0;
4419 if (TREE_CODE (pval) == ERROR_MARK)
4420 return 1;
4422 /* Push a new temporary level for any temporaries we make for
4423 this argument. */
4424 push_temp_slots ();
4426 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4428 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4429 save any previous data at that location. */
4430 if (argblock && ! variable_size && arg->stack)
4432 #ifdef ARGS_GROW_DOWNWARD
4433 /* stack_slot is negative, but we want to index stack_usage_map
4434 with positive values. */
4435 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4436 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4437 else
4438 upper_bound = 0;
4440 lower_bound = upper_bound - arg->locate.size.constant;
4441 #else
4442 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4443 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4444 else
4445 lower_bound = 0;
4447 upper_bound = lower_bound + arg->locate.size.constant;
4448 #endif
4450 i = lower_bound;
4451 /* Don't worry about things in the fixed argument area;
4452 it has already been saved. */
4453 if (i < reg_parm_stack_space)
4454 i = reg_parm_stack_space;
4455 while (i < upper_bound && stack_usage_map[i] == 0)
4456 i++;
4458 if (i < upper_bound)
4460 /* We need to make a save area. */
4461 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4462 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4463 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4464 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4466 if (save_mode == BLKmode)
4468 tree ot = TREE_TYPE (arg->tree_value);
4469 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4470 | TYPE_QUAL_CONST));
4472 arg->save_area = assign_temp (nt, 0, 1, 1);
4473 preserve_temp_slots (arg->save_area);
4474 emit_block_move (validize_mem (arg->save_area), stack_area,
4475 expr_size (arg->tree_value),
4476 BLOCK_OP_CALL_PARM);
4478 else
4480 arg->save_area = gen_reg_rtx (save_mode);
4481 emit_move_insn (arg->save_area, stack_area);
4487 /* If this isn't going to be placed on both the stack and in registers,
4488 set up the register and number of words. */
4489 if (! arg->pass_on_stack)
4491 if (flags & ECF_SIBCALL)
4492 reg = arg->tail_call_reg;
4493 else
4494 reg = arg->reg;
4495 partial = arg->partial;
4498 if (reg != 0 && partial == 0)
4499 /* Being passed entirely in a register. We shouldn't be called in
4500 this case. */
4501 abort ();
4503 /* If this arg needs special alignment, don't load the registers
4504 here. */
4505 if (arg->n_aligned_regs != 0)
4506 reg = 0;
4508 /* If this is being passed partially in a register, we can't evaluate
4509 it directly into its stack slot. Otherwise, we can. */
4510 if (arg->value == 0)
4512 /* stack_arg_under_construction is nonzero if a function argument is
4513 being evaluated directly into the outgoing argument list and
4514 expand_call must take special action to preserve the argument list
4515 if it is called recursively.
4517 For scalar function arguments stack_usage_map is sufficient to
4518 determine which stack slots must be saved and restored. Scalar
4519 arguments in general have pass_on_stack == 0.
4521 If this argument is initialized by a function which takes the
4522 address of the argument (a C++ constructor or a C function
4523 returning a BLKmode structure), then stack_usage_map is
4524 insufficient and expand_call must push the stack around the
4525 function call. Such arguments have pass_on_stack == 1.
4527 Note that it is always safe to set stack_arg_under_construction,
4528 but this generates suboptimal code if set when not needed. */
4530 if (arg->pass_on_stack)
4531 stack_arg_under_construction++;
4533 arg->value = expand_expr (pval,
4534 (partial
4535 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4536 ? NULL_RTX : arg->stack,
4537 VOIDmode, EXPAND_STACK_PARM);
4539 /* If we are promoting the object (or if for any other reason the
4540 mode doesn't agree), convert the mode. */
4542 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4543 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4544 arg->value, arg->unsignedp);
4546 if (arg->pass_on_stack)
4547 stack_arg_under_construction--;
4550 /* Don't allow anything left on stack from computation
4551 of argument to alloca. */
4552 if (flags & ECF_MAY_BE_ALLOCA)
4553 do_pending_stack_adjust ();
4555 if (arg->value == arg->stack)
4556 /* If the value is already in the stack slot, we are done. */
4557 ;
4558 else if (arg->mode != BLKmode)
4560 int size;
4562 /* Argument is a scalar, not entirely passed in registers.
4563 (If part is passed in registers, arg->partial says how much
4564 and emit_push_insn will take care of putting it there.)
4566 Push it, and if its size is less than the
4567 amount of space allocated to it,
4568 also bump stack pointer by the additional space.
4569 Note that in C the default argument promotions
4570 will prevent such mismatches. */
4572 size = GET_MODE_SIZE (arg->mode);
4573 /* Compute how much space the push instruction will push.
4574 On many machines, pushing a byte will advance the stack
4575 pointer by a halfword. */
4576 #ifdef PUSH_ROUNDING
4577 size = PUSH_ROUNDING (size);
4578 #endif
4579 used = size;
4581 /* Compute how much space the argument should get:
4582 round up to a multiple of the alignment for arguments. */
4583 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4584 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4585 / (PARM_BOUNDARY / BITS_PER_UNIT))
4586 * (PARM_BOUNDARY / BITS_PER_UNIT));
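/* Example of the rounding above (illustrative, assuming PUSH_ROUNDING
   leaves the size unchanged): a 1-byte argument with PARM_BOUNDARY ==
   32 gives size == 1 and used == 4, so emit_push_insn is told to skip
   used - size == 3 bytes of padding. */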
4588 /* This isn't already where we want it on the stack, so put it there.
4589 This can either be done with push or copy insns. */
4590 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4591 PARM_BOUNDARY, partial, reg, used - size, argblock,
4592 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4593 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4595 /* Unless this is a partially-in-register argument, the argument is now
4596 in the stack. */
4597 if (partial == 0)
4598 arg->value = arg->stack;
4600 else
4602 /* BLKmode, at least partly to be pushed. */
4604 unsigned int parm_align;
4605 int excess;
4606 rtx size_rtx;
4608 /* Pushing a nonscalar.
4609 If part is passed in registers, PARTIAL says how much
4610 and emit_push_insn will take care of putting it there. */
4612 /* Round its size up to a multiple
4613 of the allocation unit for arguments. */
4615 if (arg->locate.size.var != 0)
4617 excess = 0;
4618 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4620 else
4622 /* PUSH_ROUNDING has no effect on us, because
4623 emit_push_insn for BLKmode is careful to avoid it. */
4624 if (reg && GET_CODE (reg) == PARALLEL)
4626 /* Use the size of the elt to compute excess. */
4627 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
4628 excess = (arg->locate.size.constant
4629 - int_size_in_bytes (TREE_TYPE (pval))
4630 + partial * GET_MODE_SIZE (GET_MODE (elt)));
4632 else
4633 excess = (arg->locate.size.constant
4634 - int_size_in_bytes (TREE_TYPE (pval))
4635 + partial * UNITS_PER_WORD);
4636 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4637 NULL_RTX, TYPE_MODE (sizetype), 0);
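/* For instance (made-up figures): a 10-byte BLKmode value occupying a
   12-byte slot with no partial registers gives
   excess == 12 - 10 + 0 == 2, the padding emit_push_insn must account
   for beyond the data itself. */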
4640 /* Some types will require stricter alignment, which will be
4641 provided for elsewhere in argument layout. */
4642 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4644 /* When an argument is padded down, the block is aligned to
4645 PARM_BOUNDARY, but the actual argument isn't. */
4646 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4648 if (arg->locate.size.var)
4649 parm_align = BITS_PER_UNIT;
4650 else if (excess)
4652 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4653 parm_align = MIN (parm_align, excess_align);
4657 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4659 /* emit_push_insn might not work properly if arg->value and
4660 argblock + arg->locate.offset areas overlap. */
4661 rtx x = arg->value;
4662 int i = 0;
4664 if (XEXP (x, 0) == current_function_internal_arg_pointer
4665 || (GET_CODE (XEXP (x, 0)) == PLUS
4666 && XEXP (XEXP (x, 0), 0) ==
4667 current_function_internal_arg_pointer
4668 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4670 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4671 i = INTVAL (XEXP (XEXP (x, 0), 1));
4673 /* expand_call should ensure this */
4674 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4675 abort ();
4677 if (arg->locate.offset.constant > i)
4679 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4680 sibcall_failure = 1;
4682 else if (arg->locate.offset.constant < i)
4684 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4685 sibcall_failure = 1;
4690 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4691 parm_align, partial, reg, excess, argblock,
4692 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4693 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4695 /* Unless this is a partially-in-register argument, the argument is now
4696 in the stack.
4698 ??? Unlike the case above, in which we want the actual
4699 address of the data, so that we can load it directly into a
4700 register, here we want the address of the stack slot, so that
4701 it's properly aligned for word-by-word copying or something
4702 like that. It's not clear that this is always correct. */
4703 if (partial == 0)
4704 arg->value = arg->stack_slot;
4707 /* Mark all slots this store used. */
4708 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4709 && argblock && ! variable_size && arg->stack)
4710 for (i = lower_bound; i < upper_bound; i++)
4711 stack_usage_map[i] = 1;
4713 /* Once we have pushed something, pops can't safely
4714 be deferred during the rest of the arguments. */
4715 NO_DEFER_POP;
4717 /* ANSI doesn't require a sequence point here,
4718 but PCC has one, so this will avoid some problems. */
4719 emit_queue ();
4721 /* Free any temporary slots made in processing this argument. Show
4722 that we might have taken the address of something and pushed that
4723 as an operand. */
4724 preserve_temp_slots (NULL_RTX);
4725 free_temp_slots ();
4726 pop_temp_slots ();
4728 return sibcall_failure;
4731 /* Nonzero if we do not know how to pass TYPE solely in registers.
4732 We cannot do so in the following cases:
4734 - if the type has variable size
4735 - if the type is marked as addressable (it is required to be constructed
4736 into the stack)
4737 - if the padding and mode of the type is such that a copy into a register
4738 would put it into the wrong part of the register.
4740 Which padding can't be supported depends on the byte endianness.
4742 A value in a register is implicitly padded at the most significant end.
4743 On a big-endian machine, that is the lower end in memory.
4744 So a value padded in memory at the upper end can't go in a register.
4745 For a little-endian machine, the reverse is true. */
4747 bool
4748 default_must_pass_in_stack (enum machine_mode mode, tree type)
4750 if (!type)
4751 return false;
4753 /* If the type has variable size... */
4754 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4755 return true;
4757 /* If the type is marked as addressable (it is required
4758 to be constructed into the stack)... */
4759 if (TREE_ADDRESSABLE (type))
4760 return true;
4762 /* If the padding and mode of the type is such that a copy into
4763 a register would put it into the wrong part of the register. */
4764 if (mode == BLKmode
4765 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4766 && (FUNCTION_ARG_PADDING (mode, type)
4767 == (BYTES_BIG_ENDIAN ? upward : downward)))
4768 return true;
4770 return false;
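/* Illustration of the padding test above (hypothetical target): a
   6-byte BLKmode struct with PARM_BOUNDARY == 32 has 6 % 4 != 0; if
   the target is big-endian and pads the struct upward in memory, the
   memory padding sits at the opposite end from a register's implicit
   padding, so the copy would misplace the data and the struct must be
   passed on the stack. */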