1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "optabs.h"
32 #include "libfuncs.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "toplev.h"
36 #include "output.h"
37 #include "tm_p.h"
38 #include "timevar.h"
39 #include "sbitmap.h"
40 #include "langhooks.h"
41 #include "target.h"
42 #include "cgraph.h"
43 #include "except.h"
45 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
46 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
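/* Illustrative example (values are target-defined, not asserted here): on a
   target whose PREFERRED_STACK_BOUNDARY is 128 bits with BITS_PER_UNIT of 8,
   STACK_BYTES is 16, so argument-block sizes below are rounded to multiples
   of 16 bytes.  */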
48 /* Data structure and subroutines used within expand_call. */
50 struct arg_data
52 /* Tree node for this argument. */
53 tree tree_value;
54 /* Mode for value; TYPE_MODE unless promoted. */
55 enum machine_mode mode;
56 /* Current RTL value for argument, or 0 if it isn't precomputed. */
57 rtx value;
58     /* Initially-computed RTL value for argument; only for const functions.  */
59 rtx initial_value;
60     /* Register to pass this argument in, 0 if passed on stack, or a
61        PARALLEL if the arg is to be copied into multiple non-contiguous
62        registers.  */
63 rtx reg;
64 /* Register to pass this argument in when generating tail call sequence.
65 This is not the same register as for normal calls on machines with
66 register windows. */
67 rtx tail_call_reg;
68 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
69 form for emit_group_move. */
70 rtx parallel_value;
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
73 int unsignedp;
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
76 int partial;
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
81 int pass_on_stack;
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
86 rtx stack;
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
90 rtx stack_slot;
91 /* Place that this stack area has been saved, if needed. */
92 rtx save_area;
93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
97 rtx *aligned_regs;
98 int n_aligned_regs;
101 /* A vector of one char per byte of stack space.  A byte is nonzero if
102    the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
110 /* A bitmap of virtual-incoming stack space.  A bit is set if the corresponding
111    stack location's tail call argument has already been stored into the stack.
112    This bitmap is used to prevent sibling call optimization if the function tries
113    to use its parent's incoming argument slots when they have already been
114    overwritten with tail call arguments.  */
115 static sbitmap stored_args_map;
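/* Note (editorial): each bit of STORED_ARGS_MAP corresponds to one byte of the
   incoming argument area, indexed from the incoming argument pointer (the
   index is negated under ARGS_GROW_DOWNWARD); see
   check_sibcall_argument_overlap below, which sets the bits.  */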
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 int stack_arg_under_construction;
124 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
125 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
126 CUMULATIVE_ARGS *);
127 static void precompute_register_parameters (int, struct arg_data *, int *);
128 static int store_one_arg (struct arg_data *, rtx, int, int, int);
129 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
130 static int finalize_must_preallocate (int, int, struct arg_data *,
131 struct args_size *);
132 static void precompute_arguments (int, int, struct arg_data *);
133 static int compute_argument_block_size (int, struct args_size *, int);
134 static void initialize_argument_information (int, struct arg_data *,
135 struct args_size *, int, tree,
136 tree, CUMULATIVE_ARGS *, int,
137 rtx *, int *, int *, int *,
138 bool *, bool);
139 static void compute_argument_addresses (struct arg_data *, rtx, int);
140 static rtx rtx_for_function_call (tree, tree);
141 static void load_register_parameters (struct arg_data *, int, rtx *, int,
142 int, int *);
143 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
144 enum machine_mode, int, va_list);
145 static int special_function_p (tree, int);
146 static int check_sibcall_argument_overlap_1 (rtx);
147 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
149 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
150 unsigned int);
151 static tree split_complex_values (tree);
152 static tree split_complex_types (tree);
154 #ifdef REG_PARM_STACK_SPACE
155 static rtx save_fixed_argument_area (int, rtx, int *, int *);
156 static void restore_fixed_argument_area (rtx, rtx, int, int);
157 #endif
159 /* Force FUNEXP into a form suitable for the address of a CALL,
160 and return that as an rtx. Also load the static chain register
161 if FNDECL is a nested function.
163 CALL_FUSAGE points to a variable holding the prospective
164 CALL_INSN_FUNCTION_USAGE information. */
167 prepare_call_address (rtx funexp, rtx static_chain_value,
168 rtx *call_fusage, int reg_parm_seen, int sibcallp)
170 /* Make a valid memory address and copy constants through pseudo-regs,
171 but not for a constant address if -fno-function-cse. */
172 if (GET_CODE (funexp) != SYMBOL_REF)
173 /* If we are using registers for parameters, force the
174 function address into a register now. */
175 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
176 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
177 : memory_address (FUNCTION_MODE, funexp));
178 else if (! sibcallp)
180 #ifndef NO_FUNCTION_CSE
181 if (optimize && ! flag_no_function_cse)
182 funexp = force_reg (Pmode, funexp);
183 #endif
186 if (static_chain_value != 0)
188 static_chain_value = convert_memory_address (Pmode, static_chain_value);
189 emit_move_insn (static_chain_rtx, static_chain_value);
191 if (REG_P (static_chain_rtx))
192 use_reg (call_fusage, static_chain_rtx);
195 return funexp;
198 /* Generate instructions to call function FUNEXP,
199 and optionally pop the results.
200 The CALL_INSN is the first insn generated.
202 FNDECL is the declaration node of the function. This is given to the
203 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
205 FUNTYPE is the data type of the function. This is given to the macro
206 RETURN_POPS_ARGS to determine whether this function pops its own args.
207 We used to allow an identifier for library functions, but that doesn't
208 work when the return type is an aggregate type and the calling convention
209 says that the pointer to this aggregate is to be popped by the callee.
211 STACK_SIZE is the number of bytes of arguments on the stack,
212 ROUNDED_STACK_SIZE is that number rounded up to
213 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
214 both to put into the call insn and to generate explicit popping
215 code if necessary.
217 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
218 It is zero if this call doesn't want a structure value.
220 NEXT_ARG_REG is the rtx that results from executing
221 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
222 just after all the args have had their registers assigned.
223 This could be whatever you like, but normally it is the first
224 arg-register beyond those used for args in this call,
225 or 0 if all the arg-registers are used in this call.
226 It is passed on to `gen_call' so you can put this info in the call insn.
228 VALREG is a hard register in which a value is returned,
229 or 0 if the call does not return a value.
231 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
232 the args to this call were processed.
233 We restore `inhibit_defer_pop' to that value.
235 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
236 denote registers used by the called function. */
238 static void
239 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
240 tree funtype ATTRIBUTE_UNUSED,
241 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT rounded_stack_size,
243 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
244 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
245 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
246 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
248 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
249 rtx call_insn;
250 int already_popped = 0;
251 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
252 #if defined (HAVE_call) && defined (HAVE_call_value)
253 rtx struct_value_size_rtx;
254 struct_value_size_rtx = GEN_INT (struct_value_size);
255 #endif
257 #ifdef CALL_POPS_ARGS
258 n_popped += CALL_POPS_ARGS (* args_so_far);
259 #endif
261 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
262 and we don't want to load it into a register as an optimization,
263 because prepare_call_address already did it if it should be done. */
264 if (GET_CODE (funexp) != SYMBOL_REF)
265 funexp = memory_address (FUNCTION_MODE, funexp);
267 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
268 if ((ecf_flags & ECF_SIBCALL)
269 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
270 && (n_popped > 0 || stack_size == 0))
272 rtx n_pop = GEN_INT (n_popped);
273 rtx pat;
275 /* If this subroutine pops its own args, record that in the call insn
276 if possible, for the sake of frame pointer elimination. */
278 if (valreg)
279 pat = GEN_SIBCALL_VALUE_POP (valreg,
280 gen_rtx_MEM (FUNCTION_MODE, funexp),
281 rounded_stack_size_rtx, next_arg_reg,
282 n_pop);
283 else
284 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
285 rounded_stack_size_rtx, next_arg_reg, n_pop);
287 emit_call_insn (pat);
288 already_popped = 1;
290 else
291 #endif
293 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
294 /* If the target has "call" or "call_value" insns, then prefer them
295 if no arguments are actually popped. If the target does not have
296 "call" or "call_value" insns, then we must use the popping versions
297 even if the call has no arguments to pop. */
298 #if defined (HAVE_call) && defined (HAVE_call_value)
299 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
300 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
301 #else
302 if (HAVE_call_pop && HAVE_call_value_pop)
303 #endif
305 rtx n_pop = GEN_INT (n_popped);
306 rtx pat;
308 /* If this subroutine pops its own args, record that in the call insn
309 if possible, for the sake of frame pointer elimination. */
311 if (valreg)
312 pat = GEN_CALL_VALUE_POP (valreg,
313 gen_rtx_MEM (FUNCTION_MODE, funexp),
314 rounded_stack_size_rtx, next_arg_reg, n_pop);
315 else
316 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
317 rounded_stack_size_rtx, next_arg_reg, n_pop);
319 emit_call_insn (pat);
320 already_popped = 1;
322 else
323 #endif
325 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
326 if ((ecf_flags & ECF_SIBCALL)
327 && HAVE_sibcall && HAVE_sibcall_value)
329 if (valreg)
330 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
331 gen_rtx_MEM (FUNCTION_MODE, funexp),
332 rounded_stack_size_rtx,
333 next_arg_reg, NULL_RTX));
334 else
335 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
336 rounded_stack_size_rtx, next_arg_reg,
337 struct_value_size_rtx));
339 else
340 #endif
342 #if defined (HAVE_call) && defined (HAVE_call_value)
343 if (HAVE_call && HAVE_call_value)
345 if (valreg)
346 emit_call_insn (GEN_CALL_VALUE (valreg,
347 gen_rtx_MEM (FUNCTION_MODE, funexp),
348 rounded_stack_size_rtx, next_arg_reg,
349 NULL_RTX));
350 else
351 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
352 rounded_stack_size_rtx, next_arg_reg,
353 struct_value_size_rtx));
355 else
356 #endif
357 gcc_unreachable ();
359 /* Find the call we just emitted. */
360 call_insn = last_call_insn ();
362 /* Mark memory as used for "pure" function call. */
363 if (ecf_flags & ECF_PURE)
364 call_fusage
365 = gen_rtx_EXPR_LIST
366 (VOIDmode,
367 gen_rtx_USE (VOIDmode,
368 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
369 call_fusage);
371 /* Put the register usage information there. */
372 add_function_usage_to (call_insn, call_fusage);
374 /* If this is a const call, then set the insn's unchanging bit. */
375 if (ecf_flags & (ECF_CONST | ECF_PURE))
376 CONST_OR_PURE_CALL_P (call_insn) = 1;
378 /* If this call can't throw, attach a REG_EH_REGION reg note to that
379 effect. */
380 if (ecf_flags & ECF_NOTHROW)
381 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
382 REG_NOTES (call_insn));
383 else
385 int rn = lookup_stmt_eh_region (fntree);
387       /* If rn < 0, then either (1) tree-ssa is not used or (2) the call doesn't
388 throw, which we already took care of. */
389 if (rn > 0)
390 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
391 REG_NOTES (call_insn));
392 note_current_region_may_contain_throw ();
395 if (ecf_flags & ECF_NORETURN)
396 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
397 REG_NOTES (call_insn));
399 if (ecf_flags & ECF_RETURNS_TWICE)
401 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
402 REG_NOTES (call_insn));
403 current_function_calls_setjmp = 1;
406 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
408 /* Restore this now, so that we do defer pops for this call's args
409 if the context of the call as a whole permits. */
410 inhibit_defer_pop = old_inhibit_defer_pop;
412 if (n_popped > 0)
414 if (!already_popped)
415 CALL_INSN_FUNCTION_USAGE (call_insn)
416 = gen_rtx_EXPR_LIST (VOIDmode,
417 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
418 CALL_INSN_FUNCTION_USAGE (call_insn));
419 rounded_stack_size -= n_popped;
420 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
421 stack_pointer_delta -= n_popped;
424 if (!ACCUMULATE_OUTGOING_ARGS)
426 /* If returning from the subroutine does not automatically pop the args,
427 we need an instruction to pop them sooner or later.
428 Perhaps do it now; perhaps just record how much space to pop later.
430 If returning from the subroutine does pop the args, indicate that the
431 stack pointer will be changed. */
433 if (rounded_stack_size != 0)
435 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
436 /* Just pretend we did the pop. */
437 stack_pointer_delta -= rounded_stack_size;
438 else if (flag_defer_pop && inhibit_defer_pop == 0
439 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
440 pending_stack_adjust += rounded_stack_size;
441 else
442 adjust_stack (rounded_stack_size_rtx);
445 /* When we accumulate outgoing args, we must avoid any stack manipulations.
446 Restore the stack pointer to its original value now. Usually
447 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
448 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
449 popping variants of functions exist as well.
451 ??? We may optimize similar to defer_pop above, but it is
452 probably not worthwhile.
454 ??? It will be worthwhile to enable combine_stack_adjustments even for
455 such machines. */
456 else if (n_popped)
457 anti_adjust_stack (GEN_INT (n_popped));
460 /* Determine if the function identified by NAME and FNDECL is one with
461 special properties we wish to know about.
463 For example, if the function might return more than one time (setjmp), then
464 set RETURNS_TWICE to a nonzero value.
466    Similarly set LONGJMP if the function is in the longjmp family.
468 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
469 space from the stack such as alloca. */
471 static int
472 special_function_p (tree fndecl, int flags)
474 if (fndecl && DECL_NAME (fndecl)
475 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
476 /* Exclude functions not at the file scope, or not `extern',
477 since they are not the magic functions we would otherwise
478 think they are.
479 FIXME: this should be handled with attributes, not with this
480 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
481 because you can declare fork() inside a function if you
482 wish. */
483 && (DECL_CONTEXT (fndecl) == NULL_TREE
484 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
485 && TREE_PUBLIC (fndecl))
487 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
488 const char *tname = name;
490 /* We assume that alloca will always be called by name. It
491 makes no sense to pass it as a pointer-to-function to
492 anything that does not understand its behavior. */
493 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
494 && name[0] == 'a'
495 && ! strcmp (name, "alloca"))
496 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
497 && name[0] == '_'
498 && ! strcmp (name, "__builtin_alloca"))))
499 flags |= ECF_MAY_BE_ALLOCA;
501 /* Disregard prefix _, __ or __x. */
502 if (name[0] == '_')
504 if (name[1] == '_' && name[2] == 'x')
505 tname += 3;
506 else if (name[1] == '_')
507 tname += 2;
508 else
509 tname += 1;
512 if (tname[0] == 's')
514 if ((tname[1] == 'e'
515 && (! strcmp (tname, "setjmp")
516 || ! strcmp (tname, "setjmp_syscall")))
517 || (tname[1] == 'i'
518 && ! strcmp (tname, "sigsetjmp"))
519 || (tname[1] == 'a'
520 && ! strcmp (tname, "savectx")))
521 flags |= ECF_RETURNS_TWICE;
523 if (tname[1] == 'i'
524 && ! strcmp (tname, "siglongjmp"))
525 flags |= ECF_NORETURN;
527 else if ((tname[0] == 'q' && tname[1] == 's'
528 && ! strcmp (tname, "qsetjmp"))
529 || (tname[0] == 'v' && tname[1] == 'f'
530 && ! strcmp (tname, "vfork")))
531 flags |= ECF_RETURNS_TWICE;
533 else if (tname[0] == 'l' && tname[1] == 'o'
534 && ! strcmp (tname, "longjmp"))
535 flags |= ECF_NORETURN;
538 return flags;
541 /* Return nonzero when FNDECL represents a function that can return more than
542    once (setjmp and friends).  */
544 setjmp_call_p (tree fndecl)
546 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
549 /* Return true when EXP is a call to a function that may allocate stack space
    like alloca.  */
550 bool
551 alloca_call_p (tree exp)
553 if (TREE_CODE (exp) == CALL_EXPR
554 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
555 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
556 == FUNCTION_DECL)
557 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
558 0) & ECF_MAY_BE_ALLOCA))
559 return true;
560 return false;
563 /* Detect flags (function attributes) from the function decl or type node. */
566 flags_from_decl_or_type (tree exp)
568 int flags = 0;
569 tree type = exp;
571 if (DECL_P (exp))
573 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
574 type = TREE_TYPE (exp);
576 if (i)
578 if (i->pure_function)
579 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
580 if (i->const_function)
581 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
584 /* The function exp may have the `malloc' attribute. */
585 if (DECL_IS_MALLOC (exp))
586 flags |= ECF_MALLOC;
588 /* The function exp may have the `pure' attribute. */
589 if (DECL_IS_PURE (exp))
590 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
592 if (TREE_NOTHROW (exp))
593 flags |= ECF_NOTHROW;
595 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
596 flags |= ECF_LIBCALL_BLOCK | ECF_CONST;
598 flags = special_function_p (exp, flags);
600 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
601 flags |= ECF_CONST;
603 if (TREE_THIS_VOLATILE (exp))
604 flags |= ECF_NORETURN;
606 /* Mark if the function returns with the stack pointer depressed. We
607 cannot consider it pure or constant in that case. */
608 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
610 flags |= ECF_SP_DEPRESSED;
611 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
614 return flags;
617 /* Detect flags from a CALL_EXPR. */
620 call_expr_flags (tree t)
622 int flags;
623 tree decl = get_callee_fndecl (t);
625 if (decl)
626 flags = flags_from_decl_or_type (decl);
627 else
629 t = TREE_TYPE (TREE_OPERAND (t, 0));
630 if (t && TREE_CODE (t) == POINTER_TYPE)
631 flags = flags_from_decl_or_type (TREE_TYPE (t));
632 else
633 flags = 0;
636 return flags;
639 /* Precompute all register parameters as described by ARGS, storing values
640 into fields within the ARGS array.
642    NUM_ACTUALS indicates the total number of elements in the ARGS array.
644 Set REG_PARM_SEEN if we encounter a register parameter. */
646 static void
647 precompute_register_parameters (int num_actuals, struct arg_data *args,
648 int *reg_parm_seen)
650 int i;
652 *reg_parm_seen = 0;
654 for (i = 0; i < num_actuals; i++)
655 if (args[i].reg != 0 && ! args[i].pass_on_stack)
657 *reg_parm_seen = 1;
659 if (args[i].value == 0)
661 push_temp_slots ();
662 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
663 VOIDmode, 0);
664 preserve_temp_slots (args[i].value);
665 pop_temp_slots ();
668 /* If the value is a non-legitimate constant, force it into a
669 pseudo now. TLS symbols sometimes need a call to resolve. */
670 if (CONSTANT_P (args[i].value)
671 && !LEGITIMATE_CONSTANT_P (args[i].value))
672 args[i].value = force_reg (args[i].mode, args[i].value);
674 /* If we are to promote the function arg to a wider mode,
675 do it now. */
677 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
678 args[i].value
679 = convert_modes (args[i].mode,
680 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
681 args[i].value, args[i].unsignedp);
683 /* If we're going to have to load the value by parts, pull the
684 parts into pseudos. The part extraction process can involve
685 non-trivial computation. */
686 if (GET_CODE (args[i].reg) == PARALLEL)
688 tree type = TREE_TYPE (args[i].tree_value);
689 args[i].parallel_value
690 = emit_group_load_into_temps (args[i].reg, args[i].value,
691 type, int_size_in_bytes (type));
694 /* If the value is expensive, and we are inside an appropriately
695 short loop, put the value into a pseudo and then put the pseudo
696 into the hard reg.
698 For small register classes, also do this if this call uses
699 register parameters. This is to avoid reload conflicts while
700          loading the parameter registers.  */
702 else if ((! (REG_P (args[i].value)
703 || (GET_CODE (args[i].value) == SUBREG
704 && REG_P (SUBREG_REG (args[i].value)))))
705 && args[i].mode != BLKmode
706 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
707 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
708 || optimize))
709 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
713 #ifdef REG_PARM_STACK_SPACE
715 /* The argument list is the property of the called routine and it
716 may clobber it. If the fixed area has been used for previous
717 parameters, we must save and restore it. */
719 static rtx
720 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
722 int low;
723 int high;
725 /* Compute the boundary of the area that needs to be saved, if any. */
726 high = reg_parm_stack_space;
727 #ifdef ARGS_GROW_DOWNWARD
728 high += 1;
729 #endif
730 if (high > highest_outgoing_arg_in_use)
731 high = highest_outgoing_arg_in_use;
733 for (low = 0; low < high; low++)
734 if (stack_usage_map[low] != 0)
736 int num_to_save;
737 enum machine_mode save_mode;
738 int delta;
739 rtx stack_area;
740 rtx save_area;
742 while (stack_usage_map[--high] == 0)
745 *low_to_save = low;
746 *high_to_save = high;
748 num_to_save = high - low + 1;
749 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
751 /* If we don't have the required alignment, must do this
752 in BLKmode. */
753 if ((low & (MIN (GET_MODE_SIZE (save_mode),
754 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
755 save_mode = BLKmode;
757 #ifdef ARGS_GROW_DOWNWARD
758 delta = -high;
759 #else
760 delta = low;
761 #endif
762 stack_area = gen_rtx_MEM (save_mode,
763 memory_address (save_mode,
764 plus_constant (argblock,
765 delta)));
767 set_mem_align (stack_area, PARM_BOUNDARY);
768 if (save_mode == BLKmode)
770 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
771 emit_block_move (validize_mem (save_area), stack_area,
772 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
774 else
776 save_area = gen_reg_rtx (save_mode);
777 emit_move_insn (save_area, stack_area);
780 return save_area;
783 return NULL_RTX;
786 static void
787 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
789 enum machine_mode save_mode = GET_MODE (save_area);
790 int delta;
791 rtx stack_area;
793 #ifdef ARGS_GROW_DOWNWARD
794 delta = -high_to_save;
795 #else
796 delta = low_to_save;
797 #endif
798 stack_area = gen_rtx_MEM (save_mode,
799 memory_address (save_mode,
800 plus_constant (argblock, delta)));
801 set_mem_align (stack_area, PARM_BOUNDARY);
803 if (save_mode != BLKmode)
804 emit_move_insn (stack_area, save_area);
805 else
806 emit_block_move (stack_area, validize_mem (save_area),
807 GEN_INT (high_to_save - low_to_save + 1),
808 BLOCK_OP_CALL_PARM);
810 #endif /* REG_PARM_STACK_SPACE */
812 /* If any elements in ARGS refer to parameters that are to be passed in
813 registers, but not in memory, and whose alignment does not permit a
814    direct copy into registers, copy the values into a group of pseudos
815 which we will later copy into the appropriate hard registers.
817 Pseudos for each unaligned argument will be stored into the array
818 args[argnum].aligned_regs. The caller is responsible for deallocating
819 the aligned_regs array if it is nonzero. */
821 static void
822 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
824 int i, j;
826 for (i = 0; i < num_actuals; i++)
827 if (args[i].reg != 0 && ! args[i].pass_on_stack
828 && args[i].mode == BLKmode
829 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
830 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
832 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
833 int endian_correction = 0;
835 if (args[i].partial)
837 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
838 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
840 else
842 args[i].n_aligned_regs
843 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
846 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
848 /* Structures smaller than a word are normally aligned to the
849 least significant byte. On a BYTES_BIG_ENDIAN machine,
850 this means we must skip the empty high order bytes when
851 calculating the bit offset. */
852 if (bytes < UNITS_PER_WORD
853 #ifdef BLOCK_REG_PADDING
854 && (BLOCK_REG_PADDING (args[i].mode,
855 TREE_TYPE (args[i].tree_value), 1)
856 == downward)
857 #else
858 && BYTES_BIG_ENDIAN
859 #endif
861 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
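	/* Illustrative example (hypothetical target): on a 32-bit
	   BYTES_BIG_ENDIAN machine a 3-byte struct gives
	   endian_correction = 32 - 24 = 8, so store_bit_field below skips
	   the 8 high-order padding bits of the word.  */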
863 for (j = 0; j < args[i].n_aligned_regs; j++)
865 rtx reg = gen_reg_rtx (word_mode);
866 rtx word = operand_subword_force (args[i].value, j, BLKmode);
867 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
869 args[i].aligned_regs[j] = reg;
870 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
871 word_mode, word_mode);
873 /* There is no need to restrict this code to loading items
874 in TYPE_ALIGN sized hunks. The bitfield instructions can
875 load up entire word sized registers efficiently.
877 ??? This may not be needed anymore.
878          We used to emit a clobber here but that doesn't let later
879 passes optimize the instructions we emit. By storing 0 into
880 the register later passes know the first AND to zero out the
881 bitfield being set in the register is unnecessary. The store
882 of 0 will be deleted as will at least the first AND. */
884 emit_move_insn (reg, const0_rtx);
886 bytes -= bitsize / BITS_PER_UNIT;
887 store_bit_field (reg, bitsize, endian_correction, word_mode,
888 word);
893 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
894 ACTPARMS.
896 NUM_ACTUALS is the total number of parameters.
898 N_NAMED_ARGS is the total number of named arguments.
900    FNDECL is the tree node for the target of this call (if known).
902 ARGS_SO_FAR holds state needed by the target to know where to place
903 the next argument.
905 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
906 for arguments which are passed in registers.
908    OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
909 and may be modified by this routine.
911 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
912    flags which may be modified by this routine.
914 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
915 that requires allocation of stack space.
917 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
918 the thunked-to function. */
920 static void
921 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
922 struct arg_data *args,
923 struct args_size *args_size,
924 int n_named_args ATTRIBUTE_UNUSED,
925 tree actparms, tree fndecl,
926 CUMULATIVE_ARGS *args_so_far,
927 int reg_parm_stack_space,
928 rtx *old_stack_level, int *old_pending_adj,
929 int *must_preallocate, int *ecf_flags,
930 bool *may_tailcall, bool call_from_thunk_p)
932 /* 1 if scanning parms front to back, -1 if scanning back to front. */
933 int inc;
935 /* Count arg position in order args appear. */
936 int argpos;
938 int i;
939 tree p;
941 args_size->constant = 0;
942 args_size->var = 0;
944 /* In this loop, we consider args in the order they are written.
945 We fill up ARGS from the front or from the back if necessary
946 so that in any case the first arg to be pushed ends up at the front. */
948 if (PUSH_ARGS_REVERSED)
950 i = num_actuals - 1, inc = -1;
951 /* In this case, must reverse order of args
952 so that we compute and push the last arg first. */
954 else
956 i = 0, inc = 1;
959 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
960 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
962 tree type = TREE_TYPE (TREE_VALUE (p));
963 int unsignedp;
964 enum machine_mode mode;
966 args[i].tree_value = TREE_VALUE (p);
968 /* Replace erroneous argument with constant zero. */
969 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
970 args[i].tree_value = integer_zero_node, type = integer_type_node;
972 /* If TYPE is a transparent union, pass things the way we would
973 pass the first field of the union. We have already verified that
974 the modes are the same. */
975 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
976 type = TREE_TYPE (TYPE_FIELDS (type));
978 /* Decide where to pass this arg.
980 args[i].reg is nonzero if all or part is passed in registers.
982 args[i].partial is nonzero if part but not all is passed in registers,
983 and the exact value says how many bytes are passed in registers.
985 args[i].pass_on_stack is nonzero if the argument must at least be
986 computed on the stack. It may then be loaded back into registers
987 if args[i].reg is nonzero.
989 These decisions are driven by the FUNCTION_... macros and must agree
990 with those made by function.c. */
992 /* See if this argument should be passed by invisible reference. */
993 if (pass_by_reference (args_so_far, TYPE_MODE (type),
994 type, argpos < n_named_args))
996 bool callee_copies;
997 tree base;
999 callee_copies
1000 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1001 type, argpos < n_named_args);
1003 /* If we're compiling a thunk, pass through invisible references
1004 instead of making a copy. */
1005 if (call_from_thunk_p
1006 || (callee_copies
1007 && !TREE_ADDRESSABLE (type)
1008 && (base = get_base_address (args[i].tree_value))
1009 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1011 /* We can't use sibcalls if a callee-copied argument is
1012 stored in the current function's frame. */
1013 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1014 *may_tailcall = false;
1016 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1017 type = TREE_TYPE (args[i].tree_value);
1019 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1021 else
1023 /* We make a copy of the object and pass the address to the
1024 function being called. */
1025 rtx copy;
1027 if (!COMPLETE_TYPE_P (type)
1028 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1029 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1030 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1031 STACK_CHECK_MAX_VAR_SIZE))))
1033 /* This is a variable-sized object. Make space on the stack
1034 for it. */
1035 rtx size_rtx = expr_size (TREE_VALUE (p));
1037 if (*old_stack_level == 0)
1039 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1040 *old_pending_adj = pending_stack_adjust;
1041 pending_stack_adjust = 0;
1044 copy = gen_rtx_MEM (BLKmode,
1045 allocate_dynamic_stack_space
1046 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1047 set_mem_attributes (copy, type, 1);
1049 else
1050 copy = assign_temp (type, 0, 1, 0);
1052 store_expr (args[i].tree_value, copy, 0);
1054 if (callee_copies)
1055 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1056 else
1057 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1059 args[i].tree_value
1060 = build_fold_addr_expr (make_tree (type, copy));
1061 type = TREE_TYPE (args[i].tree_value);
1062 *may_tailcall = false;
1066 mode = TYPE_MODE (type);
1067 unsignedp = TYPE_UNSIGNED (type);
1069 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1070 mode = promote_mode (type, mode, &unsignedp, 1);
1072 args[i].unsignedp = unsignedp;
1073 args[i].mode = mode;
1075 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1076 argpos < n_named_args);
1077 #ifdef FUNCTION_INCOMING_ARG
1078 /* If this is a sibling call and the machine has register windows, the
1079          register window has to be unwound before calling the routine, so
1080 arguments have to go into the incoming registers. */
1081 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1082 argpos < n_named_args);
1083 #else
1084 args[i].tail_call_reg = args[i].reg;
1085 #endif
1087 if (args[i].reg)
1088 args[i].partial
1089 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1090 argpos < n_named_args);
1092 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1094 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1095 it means that we are to pass this arg in the register(s) designated
1096 by the PARALLEL, but also to pass it in the stack. */
1097 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1098 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1099 args[i].pass_on_stack = 1;
1101 /* If this is an addressable type, we must preallocate the stack
1102 since we must evaluate the object into its final location.
1104 If this is to be passed in both registers and the stack, it is simpler
1105 to preallocate. */
1106 if (TREE_ADDRESSABLE (type)
1107 || (args[i].pass_on_stack && args[i].reg != 0))
1108 *must_preallocate = 1;
1110 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1111 we cannot consider this function call constant. */
1112 if (TREE_ADDRESSABLE (type))
1113 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1115 /* Compute the stack-size of this argument. */
1116 if (args[i].reg == 0 || args[i].partial != 0
1117 || reg_parm_stack_space > 0
1118 || args[i].pass_on_stack)
1119 locate_and_pad_parm (mode, type,
1120 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1122 #else
1123 args[i].reg != 0,
1124 #endif
1125 args[i].pass_on_stack ? 0 : args[i].partial,
1126 fndecl, args_size, &args[i].locate);
1127 #ifdef BLOCK_REG_PADDING
1128 else
1129 /* The argument is passed entirely in registers. See at which
1130 end it should be padded. */
1131 args[i].locate.where_pad =
1132 BLOCK_REG_PADDING (mode, type,
1133 int_size_in_bytes (type) <= UNITS_PER_WORD);
1134 #endif
1136 /* Update ARGS_SIZE, the total stack space for args so far. */
1138 args_size->constant += args[i].locate.size.constant;
1139 if (args[i].locate.size.var)
1140 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1142 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1143 have been used, etc. */
1145 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1146 argpos < n_named_args);
1150 /* Update ARGS_SIZE to contain the total size for the argument block.
1151 Return the original constant component of the argument block's size.
1153 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1154 for arguments passed in registers. */
1156 static int
1157 compute_argument_block_size (int reg_parm_stack_space,
1158 struct args_size *args_size,
1159 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1161 int unadjusted_args_size = args_size->constant;
1163 /* For accumulate outgoing args mode we don't need to align, since the frame
1164      will already be aligned.  Align to STACK_BOUNDARY in order to prevent
1165 backends from generating misaligned frame sizes. */
1166 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1167 preferred_stack_boundary = STACK_BOUNDARY;
1169 /* Compute the actual size of the argument block required. The variable
1170 and constant sizes must be combined, the size may have to be rounded,
1171 and there may be a minimum required size. */
1173 if (args_size->var)
1175 args_size->var = ARGS_SIZE_TREE (*args_size);
1176 args_size->constant = 0;
1178 preferred_stack_boundary /= BITS_PER_UNIT;
1179 if (preferred_stack_boundary > 1)
1181 /* We don't handle this case yet. To handle it correctly we have
1182 to add the delta, round and subtract the delta.
1183 Currently no machine description requires this support. */
1184 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1185 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1188 if (reg_parm_stack_space > 0)
1190 args_size->var
1191 = size_binop (MAX_EXPR, args_size->var,
1192 ssize_int (reg_parm_stack_space));
1194 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1195 /* The area corresponding to register parameters is not to count in
1196 the size of the block we need. So make the adjustment. */
1197 args_size->var
1198 = size_binop (MINUS_EXPR, args_size->var,
1199 ssize_int (reg_parm_stack_space));
1200 #endif
1203 else
1205 preferred_stack_boundary /= BITS_PER_UNIT;
1206 if (preferred_stack_boundary < 1)
1207 preferred_stack_boundary = 1;
1208 args_size->constant = (((args_size->constant
1209 + stack_pointer_delta
1210 + preferred_stack_boundary - 1)
1211 / preferred_stack_boundary
1212 * preferred_stack_boundary)
1213 - stack_pointer_delta);
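      /* Worked example (illustrative numbers): with args_size->constant = 10,
	 stack_pointer_delta = 4 and a 16-byte preferred boundary, 10 + 4 = 14
	 is rounded up to 16 and the delta subtracted again, giving 12;
	 pushing 12 bytes of arguments then leaves the stack 16-byte
	 aligned.  */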
1215 args_size->constant = MAX (args_size->constant,
1216 reg_parm_stack_space);
1218 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1219 args_size->constant -= reg_parm_stack_space;
1220 #endif
1222 return unadjusted_args_size;
1225 /* Precompute parameters as needed for a function call.
1227 FLAGS is mask of ECF_* constants.
1229 NUM_ACTUALS is the number of arguments.
1231 ARGS is an array containing information for each argument; this
1232 routine fills in the INITIAL_VALUE and VALUE fields for each
1233 precomputed argument. */
1235 static void
1236 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1238 int i;
1240 /* If this is a libcall, then precompute all arguments so that we do not
1241 get extraneous instructions emitted as part of the libcall sequence. */
1242 if ((flags & ECF_LIBCALL_BLOCK) == 0)
1243 return;
1245 for (i = 0; i < num_actuals; i++)
1247 enum machine_mode mode;
1249 /* If this is an addressable type, we cannot pre-evaluate it. */
1250 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1252 args[i].initial_value = args[i].value
1253 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1255 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1256 if (mode != args[i].mode)
1258 args[i].value
1259 = convert_modes (args[i].mode, mode,
1260 args[i].value, args[i].unsignedp);
1261 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1262 /* CSE will replace this only if it contains args[i].value
1263 pseudo, so convert it down to the declared mode using
1264 a SUBREG. */
1265 if (REG_P (args[i].value)
1266 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1268 args[i].initial_value
1269 = gen_lowpart_SUBREG (mode, args[i].value);
1270 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1271 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1272 args[i].unsignedp);
1274 #endif
1279 /* Given the current state of MUST_PREALLOCATE and information about
1280 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1281 compute and return the final value for MUST_PREALLOCATE. */
1283 static int
1284 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1286 /* See if we have or want to preallocate stack space.
1288 If we would have to push a partially-in-regs parm
1289 before other stack parms, preallocate stack space instead.
1291 If the size of some parm is not a multiple of the required stack
1292 alignment, we must preallocate.
1294 If the total size of arguments that would otherwise create a copy in
1295 a temporary (such as a CALL) is more than half the total argument list
1296 size, preallocation is faster.
1298 Another reason to preallocate is if we have a machine (like the m88k)
1299 where stack alignment is required to be maintained between every
1300 pair of insns, not just when the call is made. However, we assume here
1301 that such machines either do not have push insns (and hence preallocation
1302 would occur anyway) or the problem is taken care of with
1303 PUSH_ROUNDING. */
1305 if (! must_preallocate)
1307 int partial_seen = 0;
1308 int copy_to_evaluate_size = 0;
1309 int i;
1311 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1313 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1314 partial_seen = 1;
1315 else if (partial_seen && args[i].reg == 0)
1316 must_preallocate = 1;
1318 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1319 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1320 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1321 || TREE_CODE (args[i].tree_value) == COND_EXPR
1322 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1323 copy_to_evaluate_size
1324 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1327 if (copy_to_evaluate_size * 2 >= args_size->constant
1328 && args_size->constant > 0)
1329 must_preallocate = 1;
1331 return must_preallocate;
1334 /* If we preallocated stack space, compute the address of each argument
1335 and store it into the ARGS array.
1337 We need not ensure it is a valid memory address here; it will be
1338 validized when it is used.
1340 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1342 static void
1343 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1345 if (argblock)
1347 rtx arg_reg = argblock;
1348 int i, arg_offset = 0;
1350 if (GET_CODE (argblock) == PLUS)
1351 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1353 for (i = 0; i < num_actuals; i++)
1355 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1356 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1357 rtx addr;
1358 unsigned int align, boundary;
1360 /* Skip this parm if it will not be passed on the stack. */
1361 if (! args[i].pass_on_stack && args[i].reg != 0)
1362 continue;
1364 if (GET_CODE (offset) == CONST_INT)
1365 addr = plus_constant (arg_reg, INTVAL (offset));
1366 else
1367 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1369 addr = plus_constant (addr, arg_offset);
1370 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1371 set_mem_attributes (args[i].stack,
1372 TREE_TYPE (args[i].tree_value), 1);
1373 align = BITS_PER_UNIT;
1374 boundary = args[i].locate.boundary;
1375 if (args[i].locate.where_pad != downward)
1376 align = boundary;
1377 else if (GET_CODE (offset) == CONST_INT)
1379 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1380 align = align & -align;
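	    /* ALIGN & -ALIGN isolates the lowest set bit, i.e. the largest
	       power of two dividing both the offset (in bits) and BOUNDARY.
	       E.g. (illustrative numbers) a 4-byte offset with a 64-bit
	       boundary yields 32-bit alignment: (32 | 64) & -(32 | 64) == 32.  */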
1382 set_mem_align (args[i].stack, align);
1384 if (GET_CODE (slot_offset) == CONST_INT)
1385 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1386 else
1387 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1389 addr = plus_constant (addr, arg_offset);
1390 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1391 set_mem_attributes (args[i].stack_slot,
1392 TREE_TYPE (args[i].tree_value), 1);
1393 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1395 /* Function incoming arguments may overlap with sibling call
1396 outgoing arguments and we cannot allow reordering of reads
1397 from function arguments with stores to outgoing arguments
1398 of sibling calls. */
1399 set_mem_alias_set (args[i].stack, 0);
1400 set_mem_alias_set (args[i].stack_slot, 0);
1405 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1406 in a call instruction.
1408 FNDECL is the tree node for the target function. For an indirect call
1409 FNDECL will be NULL_TREE.
1411 ADDR is the operand 0 of CALL_EXPR for this call. */
1413 static rtx
1414 rtx_for_function_call (tree fndecl, tree addr)
1416 rtx funexp;
1418 /* Get the function to call, in the form of RTL. */
1419 if (fndecl)
1421 /* If this is the first use of the function, see if we need to
1422 make an external definition for it. */
1423 if (! TREE_USED (fndecl))
1425 assemble_external (fndecl);
1426 TREE_USED (fndecl) = 1;
1429 /* Get a SYMBOL_REF rtx for the function address. */
1430 funexp = XEXP (DECL_RTL (fndecl), 0);
1432 else
1433 /* Generate an rtx (probably a pseudo-register) for the address. */
1435 push_temp_slots ();
1436 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1437 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1439 return funexp;
1442 /* Do the register loads required for any wholly-register parms or any
1443 parms which are passed both on the stack and in a register. Their
1444 expressions were already evaluated.
1446 Mark all register-parms as living through the call, putting these USE
1447 insns in the CALL_INSN_FUNCTION_USAGE field.
1449    When IS_SIBCALL, perform the check_sibcall_argument_overlap
1450 checking, setting *SIBCALL_FAILURE if appropriate. */
1452 static void
1453 load_register_parameters (struct arg_data *args, int num_actuals,
1454 rtx *call_fusage, int flags, int is_sibcall,
1455 int *sibcall_failure)
1457 int i, j;
1459 for (i = 0; i < num_actuals; i++)
1461 rtx reg = ((flags & ECF_SIBCALL)
1462 ? args[i].tail_call_reg : args[i].reg);
1463 if (reg)
1465 int partial = args[i].partial;
1466 int nregs;
1467 int size = 0;
1468 rtx before_arg = get_last_insn ();
1469          /* Set to non-negative if we must move a word at a time, even if just
1470             one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
1471 we just use a normal move insn. This value can be zero if the
1472 argument is a zero size structure with no fields. */
1473 nregs = -1;
1474 if (GET_CODE (reg) == PARALLEL)
1476 else if (partial)
1478 gcc_assert (partial % UNITS_PER_WORD == 0);
1479 nregs = partial / UNITS_PER_WORD;
1481 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1483 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1484 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1486 else
1487 size = GET_MODE_SIZE (args[i].mode);
1489 /* Handle calls that pass values in multiple non-contiguous
1490 locations. The Irix 6 ABI has examples of this. */
1492 if (GET_CODE (reg) == PARALLEL)
1493 emit_group_move (reg, args[i].parallel_value);
1495 /* If simple case, just do move. If normal partial, store_one_arg
1496 has already loaded the register for us. In all other cases,
1497 load the register(s) from memory. */
1499 else if (nregs == -1)
1501 emit_move_insn (reg, args[i].value);
1502 #ifdef BLOCK_REG_PADDING
1503 /* Handle case where we have a value that needs shifting
1504 up to the msb. eg. a QImode value and we're padding
1505 upward on a BYTES_BIG_ENDIAN machine. */
1506 if (size < UNITS_PER_WORD
1507 && (args[i].locate.where_pad
1508 == (BYTES_BIG_ENDIAN ? upward : downward)))
1510 rtx x;
1511 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1513 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1514 report the whole reg as used. Strictly speaking, the
1515 call only uses SIZE bytes at the msb end, but it doesn't
1516 seem worth generating rtl to say that. */
1517 reg = gen_rtx_REG (word_mode, REGNO (reg));
1518 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1519 build_int_cst (NULL_TREE, shift),
1520 reg, 1);
1521 if (x != reg)
1522 emit_move_insn (reg, x);
1524 #endif
1527 /* If we have pre-computed the values to put in the registers in
1528 the case of non-aligned structures, copy them in now. */
1530 else if (args[i].n_aligned_regs != 0)
1531 for (j = 0; j < args[i].n_aligned_regs; j++)
1532 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1533 args[i].aligned_regs[j]);
1535 else if (partial == 0 || args[i].pass_on_stack)
1537 rtx mem = validize_mem (args[i].value);
1539 /* Handle a BLKmode that needs shifting. */
1540 if (nregs == 1 && size < UNITS_PER_WORD
1541 #ifdef BLOCK_REG_PADDING
1542 && args[i].locate.where_pad == downward
1543 #else
1544 && BYTES_BIG_ENDIAN
1545 #endif
1548 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1549 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1550 rtx x = gen_reg_rtx (word_mode);
1551 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1552 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1553 : LSHIFT_EXPR;
1555 emit_move_insn (x, tem);
1556 x = expand_shift (dir, word_mode, x,
1557 build_int_cst (NULL_TREE, shift),
1558 ri, 1);
1559 if (x != ri)
1560 emit_move_insn (ri, x);
1562 else
1563 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1566 /* When a parameter is a block, and perhaps in other cases, it is
1567 possible that it did a load from an argument slot that was
1568 already clobbered. */
1569 if (is_sibcall
1570 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1571 *sibcall_failure = 1;
1573 /* Handle calls that pass values in multiple non-contiguous
1574 locations. The Irix 6 ABI has examples of this. */
1575 if (GET_CODE (reg) == PARALLEL)
1576 use_group_regs (call_fusage, reg);
1577 else if (nregs == -1)
1578 use_reg (call_fusage, reg);
1579 else if (nregs > 0)
1580 use_regs (call_fusage, REGNO (reg), nregs);
1585 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1586 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1587 bytes, then we would need to push some additional bytes to pad the
1588 arguments. So, we compute an adjust to the stack pointer for an
1589 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1590 bytes. Then, when the arguments are pushed the stack will be perfectly
1591 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1592 be popped after the call. Returns the adjustment. */
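/* Worked example (illustrative numbers): with a 16-byte preferred unit
   boundary, stack_pointer_delta = 0, pending_stack_adjust = 64 and
   UNADJUSTED_ARGS_SIZE = 20, the arguments alone would leave the stack 4
   bytes past alignment, so the adjustment returned is 64 - (16 - 4) = 52;
   after popping 52 bytes and pushing the 20 argument bytes the stack is
   aligned again, and ARGS_SIZE->CONSTANT becomes 64 - 52 + 20 = 32, the
   number of bytes to pop after the call.  */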
1594 static int
1595 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1596 struct args_size *args_size,
1597 unsigned int preferred_unit_stack_boundary)
1599 /* The number of bytes to pop so that the stack will be
1600 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1601 HOST_WIDE_INT adjustment;
1602 /* The alignment of the stack after the arguments are pushed, if we
1603      just pushed the arguments without adjusting the stack here.  */
1604 unsigned HOST_WIDE_INT unadjusted_alignment;
1606 unadjusted_alignment
1607 = ((stack_pointer_delta + unadjusted_args_size)
1608 % preferred_unit_stack_boundary);
1610 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1611 as possible -- leaving just enough left to cancel out the
1612 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1613 PENDING_STACK_ADJUST is non-negative, and congruent to
1614 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1616 /* Begin by trying to pop all the bytes. */
1617 unadjusted_alignment
1618 = (unadjusted_alignment
1619 - (pending_stack_adjust % preferred_unit_stack_boundary));
1620 adjustment = pending_stack_adjust;
1621 /* Push enough additional bytes that the stack will be aligned
1622 after the arguments are pushed. */
1623 if (preferred_unit_stack_boundary > 1)
1625 if (unadjusted_alignment > 0)
1626 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1627 else
1628 adjustment += unadjusted_alignment;
1631   /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1632 bytes after the call. The right number is the entire
1633 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1634 by the arguments in the first place. */
1635 args_size->constant
1636 = pending_stack_adjust - adjustment + unadjusted_args_size;
1638 return adjustment;
1641 /* Scan expression X to see whether it dereferences any argument slots
1642    that we have already clobbered with tail call arguments (as noted in
1643    the stored_args_map bitmap).
1644    Return nonzero if X dereferences such an argument slot,
1645    zero otherwise.  */
1647 static int
1648 check_sibcall_argument_overlap_1 (rtx x)
1650 RTX_CODE code;
1651 int i, j;
1652 unsigned int k;
1653 const char *fmt;
1655 if (x == NULL_RTX)
1656 return 0;
1658 code = GET_CODE (x);
1660 if (code == MEM)
1662 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1663 i = 0;
1664 else if (GET_CODE (XEXP (x, 0)) == PLUS
1665 && XEXP (XEXP (x, 0), 0) ==
1666 current_function_internal_arg_pointer
1667 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1668 i = INTVAL (XEXP (XEXP (x, 0), 1));
1669 else
1670 return 0;
1672 #ifdef ARGS_GROW_DOWNWARD
1673 i = -i - GET_MODE_SIZE (GET_MODE (x));
1674 #endif
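      /* Note (editorial): under ARGS_GROW_DOWNWARD the byte indices recorded
	 in stored_args_map are negated (see check_sibcall_argument_overlap),
	 so I is converted to that convention before the bitmap is tested.  */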
1676 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1677 if (i + k < stored_args_map->n_bits
1678 && TEST_BIT (stored_args_map, i + k))
1679 return 1;
1681 return 0;
1684 /* Scan all subexpressions. */
1685 fmt = GET_RTX_FORMAT (code);
1686 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1688 if (*fmt == 'e')
1690 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1691 return 1;
1693 else if (*fmt == 'E')
1695 for (j = 0; j < XVECLEN (x, i); j++)
1696 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1697 return 1;
1700 return 0;
1703 /* Scan the sequence after INSN to see whether it dereferences any argument
1704    slots that we have already clobbered with tail call arguments (as noted in
1705    the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, afterwards add the
1706    stack slots for ARG to the stored_args_map bitmap (when ARG is a register,
1707    MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the sequence after
1708    INSN dereferences such argument slots, zero otherwise.  */
1710 static int
1711 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1713 int low, high;
1715 if (insn == NULL_RTX)
1716 insn = get_insns ();
1717 else
1718 insn = NEXT_INSN (insn);
1720 for (; insn; insn = NEXT_INSN (insn))
1721 if (INSN_P (insn)
1722 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1723 break;
1725 if (mark_stored_args_map)
1727 #ifdef ARGS_GROW_DOWNWARD
1728 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1729 #else
1730 low = arg->locate.slot_offset.constant;
1731 #endif
1733 for (high = low + arg->locate.size.constant; low < high; low++)
1734 SET_BIT (stored_args_map, low);
1736 return insn != NULL_RTX;
1739 /* Given that a function returns a value of mode MODE at the most
1740 significant end of hard register VALUE, shift VALUE left or right
1741 as specified by LEFT_P. Return true if some action was needed. */
1743 bool
1744 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1746 HOST_WIDE_INT shift;
1748 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1749 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
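/* For example, an HImode value held at the most significant end of a
   32-bit hard register gives SHIFT == 16 here, and the value is moved
   to or from the least significant half below.  */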
1750 if (shift == 0)
1751 return false;
1753 /* Use ashr rather than lshr for right shifts. This is for the benefit
1754 of the MIPS port, which requires SImode values to be sign-extended
1755 when stored in 64-bit registers. */
1756 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1757 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1758 gcc_unreachable ();
1759 return true;
1762 /* Remove all REG_EQUIV notes found in the insn chain. */
1764 static void
1765 purge_reg_equiv_notes (void)
1767 rtx insn;
1769 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1771 while (1)
1773 rtx note = find_reg_note (insn, REG_EQUIV, 0);
1774 if (note)
1776 /* Remove the note and keep looking at the notes for
1777 this insn. */
1778 remove_note (insn, note);
1779 continue;
1781 break;
1786 /* Generate all the code for a function call
1787 and return an rtx for its value.
1788 Store the value in TARGET (specified as an rtx) if convenient.
1789 If the value is stored in TARGET then TARGET is returned.
1790 If IGNORE is nonzero, then we ignore the value of the function call. */
1792 rtx
1793 expand_call (tree exp, rtx target, int ignore)
1795 /* Nonzero if we are currently expanding a call. */
1796 static int currently_expanding_call = 0;
1798 /* List of actual parameters. */
1799 tree actparms = TREE_OPERAND (exp, 1);
1800 /* RTX for the function to be called. */
1801 rtx funexp;
1802 /* Sequence of insns to perform a normal "call". */
1803 rtx normal_call_insns = NULL_RTX;
1804 /* Sequence of insns to perform a tail "call". */
1805 rtx tail_call_insns = NULL_RTX;
1806 /* Data type of the function. */
1807 tree funtype;
1808 tree type_arg_types;
1809 /* Declaration of the function being called,
1810 or 0 if the function is computed (not known by name). */
1811 tree fndecl = 0;
1812 /* The type of the function being called. */
1813 tree fntype;
1814 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1815 int pass;
1817 /* Register in which non-BLKmode value will be returned,
1818 or 0 if no value or if value is BLKmode. */
1819 rtx valreg;
1820 /* Address where we should return a BLKmode value;
1821 0 if value not BLKmode. */
1822 rtx structure_value_addr = 0;
1823 /* Nonzero if that address is being passed by treating it as
1824 an extra, implicit first parameter. Otherwise,
1825 it is passed by being copied directly into struct_value_rtx. */
1826 int structure_value_addr_parm = 0;
1827 /* Size of aggregate value wanted, or zero if none wanted
1828 or if we are using the non-reentrant PCC calling convention
1829 or expecting the value in registers. */
1830 HOST_WIDE_INT struct_value_size = 0;
1831 /* Nonzero if called function returns an aggregate in memory PCC style,
1832 by returning the address of where to find it. */
1833 int pcc_struct_value = 0;
1834 rtx struct_value = 0;
1836 /* Number of actual parameters in this call, including struct value addr. */
1837 int num_actuals;
1838 /* Number of named args. Args after this are anonymous ones
1839 and they must all go on the stack. */
1840 int n_named_args;
1842 /* Vector of information about each argument.
1843 Arguments are numbered in the order they will be pushed,
1844 not the order they are written. */
1845 struct arg_data *args;
1847 /* Total size in bytes of all the stack-parms scanned so far. */
1848 struct args_size args_size;
1849 struct args_size adjusted_args_size;
1850 /* Size of arguments before any adjustments (such as rounding). */
1851 int unadjusted_args_size;
1852 /* Data on reg parms scanned so far. */
1853 CUMULATIVE_ARGS args_so_far;
1854 /* Nonzero if a reg parm has been scanned. */
1855 int reg_parm_seen;
1856 /* Nonzero if this is an indirect function call. */
1858 /* Nonzero if we must avoid push-insns in the args for this call.
1859 If stack space is allocated for register parameters, but not by the
1860 caller, then it is preallocated in the fixed part of the stack frame.
1861 So the entire argument block must then be preallocated (i.e., we
1862 ignore PUSH_ROUNDING in that case). */
1864 int must_preallocate = !PUSH_ARGS;
1866 /* Size of the stack reserved for parameter registers. */
1867 int reg_parm_stack_space = 0;
1869 /* Address of space preallocated for stack parms
1870 (on machines that lack push insns), or 0 if space not preallocated. */
1871 rtx argblock = 0;
1873 /* Mask of ECF_ flags. */
1874 int flags = 0;
1875 #ifdef REG_PARM_STACK_SPACE
1876 /* Define the boundary of the register parm stack space that needs to be
1877 saved, if any. */
1878 int low_to_save, high_to_save;
1879 rtx save_area = 0; /* Place that it is saved */
1880 #endif
1882 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1883 char *initial_stack_usage_map = stack_usage_map;
1885 int old_stack_allocated;
1887 /* State variables to track stack modifications. */
1888 rtx old_stack_level = 0;
1889 int old_stack_arg_under_construction = 0;
1890 int old_pending_adj = 0;
1891 int old_inhibit_defer_pop = inhibit_defer_pop;
1893 /* Some stack pointer alterations we make are performed via
1894 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1895 which we then also need to save/restore along the way. */
1896 int old_stack_pointer_delta = 0;
1898 rtx call_fusage;
1899 tree p = TREE_OPERAND (exp, 0);
1900 tree addr = TREE_OPERAND (exp, 0);
1901 int i;
1902 /* The alignment of the stack, in bits. */
1903 unsigned HOST_WIDE_INT preferred_stack_boundary;
1904 /* The alignment of the stack, in bytes. */
1905 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1906 /* The static chain value to use for this call. */
1907 rtx static_chain_value;
1908 /* See if this is a "nothrow" function call. */
1909 if (TREE_NOTHROW (exp))
1910 flags |= ECF_NOTHROW;
1912 /* See if we can find a DECL-node for the actual function, and get the
1913 function attributes (flags) from the function decl or type node. */
1914 fndecl = get_callee_fndecl (exp);
1915 if (fndecl)
1917 fntype = TREE_TYPE (fndecl);
1918 flags |= flags_from_decl_or_type (fndecl);
1920 else
1922 fntype = TREE_TYPE (TREE_TYPE (p));
1923 flags |= flags_from_decl_or_type (fntype);
1926 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
1928 /* Warn if this value is an aggregate type,
1929 regardless of which calling convention we are using for it. */
1930 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1931 warning ("function call has aggregate value");
1933 /* If the result of a pure or const function call is ignored (or void),
1934 and none of its arguments are volatile, we can avoid expanding the
1935 call and just evaluate the arguments for side-effects. */
1936 if ((flags & (ECF_CONST | ECF_PURE))
1937 && (ignore || target == const0_rtx
1938 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
1940 bool volatilep = false;
1941 tree arg;
1943 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1944 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
1946 volatilep = true;
1947 break;
1950 if (! volatilep)
1952 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1953 expand_expr (TREE_VALUE (arg), const0_rtx,
1954 VOIDmode, EXPAND_NORMAL);
1955 return const0_rtx;
1959 #ifdef REG_PARM_STACK_SPACE
1960 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1961 #endif
1963 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1964 if (reg_parm_stack_space > 0 && PUSH_ARGS)
1965 must_preallocate = 1;
1966 #endif
1968 /* Set up a place to return a structure. */
1970 /* Cater to broken compilers. */
1971 if (aggregate_value_p (exp, fndecl))
1973 /* This call returns a big structure. */
1974 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1976 #ifdef PCC_STATIC_STRUCT_RETURN
1978 pcc_struct_value = 1;
1980 #else /* not PCC_STATIC_STRUCT_RETURN */
1982 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1984 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
1986 /* The structure value address arg is already in actparms.
1987 Pull it out. It might be nice to just leave it there, but
1988 we need to set structure_value_addr. */
1989 tree return_arg = TREE_VALUE (actparms);
1990 actparms = TREE_CHAIN (actparms);
1991 structure_value_addr = expand_expr (return_arg, NULL_RTX,
1992 VOIDmode, EXPAND_NORMAL);
1994 else if (target && MEM_P (target))
1995 structure_value_addr = XEXP (target, 0);
1996 else
1998 /* For variable-sized objects, we must be called with a target
1999 specified. If we were to allocate space on the stack here,
2000 we would have no way of knowing when to free it. */
2001 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2003 mark_temp_addr_taken (d);
2004 structure_value_addr = XEXP (d, 0);
2005 target = 0;
2008 #endif /* not PCC_STATIC_STRUCT_RETURN */
2011 /* Figure out the amount to which the stack should be aligned. */
2012 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2013 if (fndecl)
2015 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2016 if (i && i->preferred_incoming_stack_boundary)
2017 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2020 /* Operand 0 is a pointer-to-function; get the type of the function. */
2021 funtype = TREE_TYPE (addr);
2022 gcc_assert (POINTER_TYPE_P (funtype));
2023 funtype = TREE_TYPE (funtype);
2025 /* Munge the tree to split complex arguments into their real
2026 and imaginary parts. */
2027 if (targetm.calls.split_complex_arg)
2029 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2030 actparms = split_complex_values (actparms);
2032 else
2033 type_arg_types = TYPE_ARG_TYPES (funtype);
2035 if (flags & ECF_MAY_BE_ALLOCA)
2036 current_function_calls_alloca = 1;
2038 /* If struct_value_rtx is 0, it means pass the address
2039 as if it were an extra parameter. */
2040 if (structure_value_addr && struct_value == 0)
2042 /* If structure_value_addr is a REG other than
2043 virtual_outgoing_args_rtx, we can always use it. If it
2044 is not a REG, we must always copy it into a register.
2045 If it is virtual_outgoing_args_rtx, we must copy it to another
2046 register in some cases. */
2047 rtx temp = (!REG_P (structure_value_addr)
2048 || (ACCUMULATE_OUTGOING_ARGS
2049 && stack_arg_under_construction
2050 && structure_value_addr == virtual_outgoing_args_rtx)
2051 ? copy_addr_to_reg (convert_memory_address
2052 (Pmode, structure_value_addr))
2053 : structure_value_addr);
2055 actparms
2056 = tree_cons (error_mark_node,
2057 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2058 temp),
2059 actparms);
2060 structure_value_addr_parm = 1;
2063 /* Count the arguments and set NUM_ACTUALS. */
2064 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2065 num_actuals++;
2067 /* Compute number of named args.
2068 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2070 if (type_arg_types != 0)
2071 n_named_args
2072 = (list_length (type_arg_types)
2073 /* Count the struct value address, if it is passed as a parm. */
2074 + structure_value_addr_parm);
2075 else
2076 /* If we know nothing, treat all args as named. */
2077 n_named_args = num_actuals;
2079 /* Start updating where the next arg would go.
2081 On some machines (such as the PA) indirect calls have a different
2082 calling convention than normal calls. The fourth argument in
2083 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2084 or not. */
2085 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2087 /* Now possibly adjust the number of named args.
2088 Normally, don't include the last named arg if anonymous args follow.
2089 We do include the last named arg if
2090 targetm.calls.strict_argument_naming() returns nonzero.
2091 (If no anonymous args follow, the result of list_length is actually
2092 one too large. This is harmless.)
2094 If targetm.calls.pretend_outgoing_varargs_named() returns
2095 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2096 this machine will be able to place unnamed args that were passed
2097 in registers into the stack. So treat all args as named. This
2098 allows the insns emitted for a specific argument list to be
2099 independent of the function declaration.
2101 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2102 we do not have any reliable way to pass unnamed args in
2103 registers, so we must force them into memory. */
2105 if (type_arg_types != 0
2106 && targetm.calls.strict_argument_naming (&args_so_far))
2107 ;
2108 else if (type_arg_types != 0
2109 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2110 /* Don't include the last named arg. */
2111 --n_named_args;
2112 else
2113 /* Treat all args as named. */
2114 n_named_args = num_actuals;
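/* Illustrative example: for a call to a function declared
   "int f (int, ...)", TYPE_ARG_TYPES lists a single named type, so
   N_NAMED_ARGS starts at 1 (plus 1 if the structure value address is
   passed as a parm).  On a target that neither uses strict argument
   naming nor pretends outgoing varargs are named, the last named arg
   is then dropped here.  */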
2116 /* Make a vector to hold all the information about each arg. */
2117 args = alloca (num_actuals * sizeof (struct arg_data));
2118 memset (args, 0, num_actuals * sizeof (struct arg_data));
2120 /* Build up entries in the ARGS array, compute the size of the
2121 arguments into ARGS_SIZE, etc. */
2122 initialize_argument_information (num_actuals, args, &args_size,
2123 n_named_args, actparms, fndecl,
2124 &args_so_far, reg_parm_stack_space,
2125 &old_stack_level, &old_pending_adj,
2126 &must_preallocate, &flags,
2127 &try_tail_call, CALL_FROM_THUNK_P (exp));
2129 if (args_size.var)
2131 /* If this function requires a variable-sized argument list, don't
2132 try to make a cse'able block for this call. We may be able to
2133 do this eventually, but it is too complicated to keep track of
2134 what insns go in the cse'able block and which don't. */
2136 flags &= ~ECF_LIBCALL_BLOCK;
2137 must_preallocate = 1;
2140 /* Now make final decision about preallocating stack space. */
2141 must_preallocate = finalize_must_preallocate (must_preallocate,
2142 num_actuals, args,
2143 &args_size);
2145 /* If the structure value address will reference the stack pointer, we
2146 must stabilize it. We don't need to do this if we know that we are
2147 not going to adjust the stack pointer in processing this call. */
2149 if (structure_value_addr
2150 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2151 || reg_mentioned_p (virtual_outgoing_args_rtx,
2152 structure_value_addr))
2153 && (args_size.var
2154 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2155 structure_value_addr = copy_to_reg (structure_value_addr);
2157 /* Tail calls can make things harder to debug, and we've traditionally
2158 pushed these optimizations into -O2. Don't try if we're already
2159 expanding a call, as that means we're an argument. Don't try if
2160 there are cleanups, as we know there's code to follow the call. */
2162 if (currently_expanding_call++ != 0
2163 || !flag_optimize_sibling_calls
2164 || args_size.var
2165 || lookup_stmt_eh_region (exp) >= 0)
2166 try_tail_call = 0;
2168 /* Remaining reasons why tail call optimization must fail. */
2169 if (
2170 #ifdef HAVE_sibcall_epilogue
2171 !HAVE_sibcall_epilogue
2172 #else
2173 1
2174 #endif
2175 || !try_tail_call
2176 /* Doing sibling call optimization needs some work, since
2177 structure_value_addr can be allocated on the stack.
2178 It does not seem worth the effort since few optimizable
2179 sibling calls will return a structure. */
2180 || structure_value_addr != NULL_RTX
2181 /* Check whether the target is able to optimize the call
2182 into a sibcall. */
2183 || !targetm.function_ok_for_sibcall (fndecl, exp)
2184 /* Functions that do not return exactly once may not be sibcall
2185 optimized. */
2186 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2187 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2188 /* If the called function is nested in the current one, it might access
2189 some of the caller's arguments, but could clobber them beforehand if
2190 the argument areas are shared. */
2191 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2192 /* If this function requires more stack slots than the current
2193 function, we cannot change it into a sibling call. */
2194 || args_size.constant > current_function_args_size
2195 /* If the callee pops its own arguments, then it must pop exactly
2196 the same number of arguments as the current function. */
2197 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2198 != RETURN_POPS_ARGS (current_function_decl,
2199 TREE_TYPE (current_function_decl),
2200 current_function_args_size))
2201 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2202 try_tail_call = 0;
2204 /* Ensure current function's preferred stack boundary is at least
2205 what we need. We don't have to increase alignment for recursive
2206 functions. */
2207 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2208 && fndecl != current_function_decl)
2209 cfun->preferred_stack_boundary = preferred_stack_boundary;
2210 if (fndecl == current_function_decl)
2211 cfun->recursive_call_emit = true;
2213 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
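/* E.g. a PREFERRED_STACK_BOUNDARY of 128 bits yields a 16-byte unit
   boundary on the usual 8-bits-per-unit targets.  */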
2215 /* We want to make two insn chains; one for a sibling call, the other
2216 for a normal call. We will select one of the two chains after
2217 initial RTL generation is complete. */
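/* Pass 0 builds the sibling call sequence and only runs when
   TRY_TAIL_CALL is set; pass 1 builds the normal call sequence.  If
   pass 0 sets SIBCALL_FAILURE, the tail call insns are discarded at the
   bottom of the loop and pass 1 provides the sequence actually emitted;
   otherwise pass 1 is skipped entirely.  */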
2218 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2220 int sibcall_failure = 0;
2221 /* We want to emit any pending stack adjustments before the tail
2222 recursion "call". That way we know any adjustment after the tail
2223 recursion call can be ignored if we indeed use the tail
2224 call expansion. */
2225 int save_pending_stack_adjust = 0;
2226 int save_stack_pointer_delta = 0;
2227 rtx insns;
2228 rtx before_call, next_arg_reg;
2230 if (pass == 0)
2232 /* State variables we need to save and restore between
2233 iterations. */
2234 save_pending_stack_adjust = pending_stack_adjust;
2235 save_stack_pointer_delta = stack_pointer_delta;
2237 if (pass)
2238 flags &= ~ECF_SIBCALL;
2239 else
2240 flags |= ECF_SIBCALL;
2242 /* Other state variables that we must reinitialize each time
2243 through the loop (that are not initialized by the loop itself). */
2244 argblock = 0;
2245 call_fusage = 0;
2247 /* Start a new sequence for the normal call case.
2249 From this point on, if the sibling call fails, we want to set
2250 sibcall_failure instead of continuing the loop. */
2251 start_sequence ();
2253 /* Don't let pending stack adjusts add up to too much.
2254 Also, do all pending adjustments now if there is any chance
2255 this might be a call to alloca or if we are expanding a sibling
2256 call sequence or if we are calling a function that is to return
2257 with stack pointer depressed. */
2258 if (pending_stack_adjust >= 32
2259 || (pending_stack_adjust > 0
2260 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2261 || pass == 0)
2262 do_pending_stack_adjust ();
2264 /* When calling a const function, we must pop the stack args right away,
2265 so that the pop is deleted or moved with the call. */
2266 if (pass && (flags & ECF_LIBCALL_BLOCK))
2267 NO_DEFER_POP;
2269 /* Precompute any arguments as needed. */
2270 if (pass)
2271 precompute_arguments (flags, num_actuals, args);
2273 /* Now we are about to start emitting insns that can be deleted
2274 if a libcall is deleted. */
2275 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2276 start_sequence ();
2278 adjusted_args_size = args_size;
2279 /* Compute the actual size of the argument block required. The variable
2280 and constant sizes must be combined, the size may have to be rounded,
2281 and there may be a minimum required size. When generating a sibcall
2282 pattern, do not round up, since we'll be re-using whatever space our
2283 caller provided. */
2284 unadjusted_args_size
2285 = compute_argument_block_size (reg_parm_stack_space,
2286 &adjusted_args_size,
2287 (pass == 0 ? 0
2288 : preferred_stack_boundary));
2290 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2292 /* The argument block when performing a sibling call is the
2293 incoming argument block. */
2294 if (pass == 0)
2296 argblock = virtual_incoming_args_rtx;
2297 argblock
2298 #ifdef STACK_GROWS_DOWNWARD
2299 = plus_constant (argblock, current_function_pretend_args_size);
2300 #else
2301 = plus_constant (argblock, -current_function_pretend_args_size);
2302 #endif
2303 stored_args_map = sbitmap_alloc (args_size.constant);
2304 sbitmap_zero (stored_args_map);
2307 /* If we have no actual push instructions, or shouldn't use them,
2308 make space for all args right now. */
2309 else if (adjusted_args_size.var != 0)
2311 if (old_stack_level == 0)
2313 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2314 old_stack_pointer_delta = stack_pointer_delta;
2315 old_pending_adj = pending_stack_adjust;
2316 pending_stack_adjust = 0;
2317 /* stack_arg_under_construction says whether a stack arg is
2318 being constructed at the old stack level. Pushing the stack
2319 gets a clean outgoing argument block. */
2320 old_stack_arg_under_construction = stack_arg_under_construction;
2321 stack_arg_under_construction = 0;
2323 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2325 else
2327 /* Note that we must go through the motions of allocating an argument
2328 block even if the size is zero because we may be storing args
2329 in the area reserved for register arguments, which may be part of
2330 the stack frame. */
2332 int needed = adjusted_args_size.constant;
2334 /* Store the maximum argument space used. It will be pushed by
2335 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2336 checking). */
2338 if (needed > current_function_outgoing_args_size)
2339 current_function_outgoing_args_size = needed;
2341 if (must_preallocate)
2343 if (ACCUMULATE_OUTGOING_ARGS)
2345 /* Since the stack pointer will never be pushed, it is
2346 possible for the evaluation of a parm to clobber
2347 something we have already written to the stack.
2348 Since most function calls on RISC machines do not use
2349 the stack, this is uncommon, but must work correctly.
2351 Therefore, we save any area of the stack that was already
2352 written and that we are using. Here we set up to do this
2353 by making a new stack usage map from the old one. The
2354 actual save will be done by store_one_arg.
2356 Another approach might be to try to reorder the argument
2357 evaluations to avoid this conflicting stack usage. */
2359 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2360 /* Since we will be writing into the entire argument area,
2361 the map must be allocated for its entire size, not just
2362 the part that is the responsibility of the caller. */
2363 needed += reg_parm_stack_space;
2364 #endif
2366 #ifdef ARGS_GROW_DOWNWARD
2367 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2368 needed + 1);
2369 #else
2370 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2371 needed);
2372 #endif
2373 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2375 if (initial_highest_arg_in_use)
2376 memcpy (stack_usage_map, initial_stack_usage_map,
2377 initial_highest_arg_in_use);
2379 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2380 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2381 (highest_outgoing_arg_in_use
2382 - initial_highest_arg_in_use));
2383 needed = 0;
2385 /* The address of the outgoing argument list must not be
2386 copied to a register here, because argblock would be left
2387 pointing to the wrong place after the call to
2388 allocate_dynamic_stack_space below. */
2390 argblock = virtual_outgoing_args_rtx;
2392 else
2394 if (inhibit_defer_pop == 0)
2396 /* Try to reuse some or all of the pending_stack_adjust
2397 to get this space. */
2398 needed
2399 = (combine_pending_stack_adjustment_and_call
2400 (unadjusted_args_size,
2401 &adjusted_args_size,
2402 preferred_unit_stack_boundary));
2404 /* combine_pending_stack_adjustment_and_call computes
2405 an adjustment before the arguments are allocated.
2406 Account for them and see whether or not the stack
2407 needs to go up or down. */
2408 needed = unadjusted_args_size - needed;
2410 if (needed < 0)
2412 /* We're releasing stack space. */
2413 /* ??? We can avoid any adjustment at all if we're
2414 already aligned. FIXME. */
2415 pending_stack_adjust = -needed;
2416 do_pending_stack_adjust ();
2417 needed = 0;
2419 else
2420 /* We need to allocate space. We'll do that in
2421 push_block below. */
2422 pending_stack_adjust = 0;
2425 /* Special case this because overhead of `push_block' in
2426 this case is non-trivial. */
2427 if (needed == 0)
2428 argblock = virtual_outgoing_args_rtx;
2429 else
2431 argblock = push_block (GEN_INT (needed), 0, 0);
2432 #ifdef ARGS_GROW_DOWNWARD
2433 argblock = plus_constant (argblock, needed);
2434 #endif
2437 /* We only really need to call `copy_to_reg' in the case
2438 where push insns are going to be used to pass ARGBLOCK
2439 to a function call in ARGS. In that case, the stack
2440 pointer changes value from the allocation point to the
2441 call point, and hence the value of
2442 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2443 as well always do it. */
2444 argblock = copy_to_reg (argblock);
2449 if (ACCUMULATE_OUTGOING_ARGS)
2451 /* The save/restore code in store_one_arg handles all
2452 cases except one: a constructor call (including a C
2453 function returning a BLKmode struct) to initialize
2454 an argument. */
2455 if (stack_arg_under_construction)
2457 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2458 rtx push_size = GEN_INT (reg_parm_stack_space
2459 + adjusted_args_size.constant);
2460 #else
2461 rtx push_size = GEN_INT (adjusted_args_size.constant);
2462 #endif
2463 if (old_stack_level == 0)
2465 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2466 NULL_RTX);
2467 old_stack_pointer_delta = stack_pointer_delta;
2468 old_pending_adj = pending_stack_adjust;
2469 pending_stack_adjust = 0;
2470 /* stack_arg_under_construction says whether a stack
2471 arg is being constructed at the old stack level.
2472 Pushing the stack gets a clean outgoing argument
2473 block. */
2474 old_stack_arg_under_construction
2475 = stack_arg_under_construction;
2476 stack_arg_under_construction = 0;
2477 /* Make a new map for the new argument list. */
2478 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2479 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2480 highest_outgoing_arg_in_use = 0;
2482 allocate_dynamic_stack_space (push_size, NULL_RTX,
2483 BITS_PER_UNIT);
2486 /* If argument evaluation might modify the stack pointer,
2487 copy the address of the argument list to a register. */
2488 for (i = 0; i < num_actuals; i++)
2489 if (args[i].pass_on_stack)
2491 argblock = copy_addr_to_reg (argblock);
2492 break;
2496 compute_argument_addresses (args, argblock, num_actuals);
2498 /* If we push args individually in reverse order, perform stack alignment
2499 before the first push (the last arg). */
2500 if (PUSH_ARGS_REVERSED && argblock == 0
2501 && adjusted_args_size.constant != unadjusted_args_size)
2503 /* When the stack adjustment is pending, we get better code
2504 by combining the adjustments. */
2505 if (pending_stack_adjust
2506 && ! (flags & ECF_LIBCALL_BLOCK)
2507 && ! inhibit_defer_pop)
2509 pending_stack_adjust
2510 = (combine_pending_stack_adjustment_and_call
2511 (unadjusted_args_size,
2512 &adjusted_args_size,
2513 preferred_unit_stack_boundary));
2514 do_pending_stack_adjust ();
2516 else if (argblock == 0)
2517 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2518 - unadjusted_args_size));
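/* E.g. if the arguments themselves need 20 bytes but a 16-byte boundary
   rounded ADJUSTED_ARGS_SIZE up to 32, the 12 extra bytes are allocated
   here so that the stack is aligned once the 20 bytes of arguments have
   been pushed.  */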
2520 /* Now that the stack is properly aligned, pops can't safely
2521 be deferred during the evaluation of the arguments. */
2522 NO_DEFER_POP;
2524 funexp = rtx_for_function_call (fndecl, addr);
2526 /* Figure out the register where the value, if any, will come back. */
2527 valreg = 0;
2528 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2529 && ! structure_value_addr)
2531 if (pcc_struct_value)
2532 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2533 fndecl, (pass == 0));
2534 else
2535 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2538 /* Precompute all register parameters. It isn't safe to compute anything
2539 once we have started filling any specific hard regs. */
2540 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2542 if (TREE_OPERAND (exp, 2))
2543 static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
2544 NULL_RTX, VOIDmode, 0);
2545 else
2546 static_chain_value = 0;
2548 #ifdef REG_PARM_STACK_SPACE
2549 /* Save the fixed argument area if it's part of the caller's frame and
2550 is clobbered by argument setup for this call. */
2551 if (ACCUMULATE_OUTGOING_ARGS && pass)
2552 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2553 &low_to_save, &high_to_save);
2554 #endif
2556 /* Now store (and compute if necessary) all non-register parms.
2557 These come before register parms, since they can require block-moves,
2558 which could clobber the registers used for register parms.
2559 Parms which have partial registers are not stored here,
2560 but we do preallocate space here if they want that. */
2562 for (i = 0; i < num_actuals; i++)
2563 if (args[i].reg == 0 || args[i].pass_on_stack)
2565 rtx before_arg = get_last_insn ();
2567 if (store_one_arg (&args[i], argblock, flags,
2568 adjusted_args_size.var != 0,
2569 reg_parm_stack_space)
2570 || (pass == 0
2571 && check_sibcall_argument_overlap (before_arg,
2572 &args[i], 1)))
2573 sibcall_failure = 1;
2575 if (flags & ECF_CONST
2576 && args[i].stack
2577 && args[i].value == args[i].stack)
2578 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2579 gen_rtx_USE (VOIDmode,
2580 args[i].value),
2581 call_fusage);
2584 /* If we have a parm that is passed in registers but not in memory
2585 and whose alignment does not permit a direct copy into registers,
2586 make a group of pseudos that correspond to each register that we
2587 will later fill. */
2588 if (STRICT_ALIGNMENT)
2589 store_unaligned_arguments_into_pseudos (args, num_actuals);
2591 /* Now store any partially-in-registers parm.
2592 This is the last place a block-move can happen. */
2593 if (reg_parm_seen)
2594 for (i = 0; i < num_actuals; i++)
2595 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2597 rtx before_arg = get_last_insn ();
2599 if (store_one_arg (&args[i], argblock, flags,
2600 adjusted_args_size.var != 0,
2601 reg_parm_stack_space)
2602 || (pass == 0
2603 && check_sibcall_argument_overlap (before_arg,
2604 &args[i], 1)))
2605 sibcall_failure = 1;
2608 /* If we pushed args in forward order, perform stack alignment
2609 after pushing the last arg. */
2610 if (!PUSH_ARGS_REVERSED && argblock == 0)
2611 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2612 - unadjusted_args_size));
2614 /* If register arguments require space on the stack and stack space
2615 was not preallocated, allocate stack space here for arguments
2616 passed in registers. */
2617 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2618 if (!ACCUMULATE_OUTGOING_ARGS
2619 && must_preallocate == 0 && reg_parm_stack_space > 0)
2620 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2621 #endif
2623 /* Pass the function the address in which to return a
2624 structure value. */
2625 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2627 structure_value_addr
2628 = convert_memory_address (Pmode, structure_value_addr);
2629 emit_move_insn (struct_value,
2630 force_reg (Pmode,
2631 force_operand (structure_value_addr,
2632 NULL_RTX)));
2634 if (REG_P (struct_value))
2635 use_reg (&call_fusage, struct_value);
2638 funexp = prepare_call_address (funexp, static_chain_value,
2639 &call_fusage, reg_parm_seen, pass == 0);
2641 load_register_parameters (args, num_actuals, &call_fusage, flags,
2642 pass == 0, &sibcall_failure);
2644 /* Save a pointer to the last insn before the call, so that we can
2645 later safely search backwards to find the CALL_INSN. */
2646 before_call = get_last_insn ();
2648 /* Set up next argument register. For sibling calls on machines
2649 with register windows this should be the incoming register. */
2650 #ifdef FUNCTION_INCOMING_ARG
2651 if (pass == 0)
2652 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2653 void_type_node, 1);
2654 else
2655 #endif
2656 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2657 void_type_node, 1);
2659 /* All arguments and registers used for the call must be set up by
2660 now! */
2662 /* Stack must be properly aligned now. */
2663 gcc_assert (!pass
2664 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2666 /* Generate the actual call instruction. */
2667 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2668 adjusted_args_size.constant, struct_value_size,
2669 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2670 flags, & args_so_far);
2672 /* If a non-BLKmode value is returned at the most significant end
2673 of a register, shift the register right by the appropriate amount
2674 and update VALREG accordingly. BLKmode values are handled by the
2675 group load/store machinery below. */
2676 if (!structure_value_addr
2677 && !pcc_struct_value
2678 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2679 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2681 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2682 sibcall_failure = 1;
2683 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2686 /* If call is cse'able, make appropriate pair of reg-notes around it.
2687 Test valreg so we don't crash; may safely ignore `const'
2688 if return type is void. Disable for PARALLEL return values, because
2689 we have no way to move such values into a pseudo register. */
2690 if (pass && (flags & ECF_LIBCALL_BLOCK))
2692 rtx insns;
2693 rtx insn;
2694 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2696 insns = get_insns ();
2698 /* Expansion of block moves may have introduced a loop that cannot
2699 appear inside a libcall block. */
2700 for (insn = insns; insn; insn = NEXT_INSN (insn))
2701 if (JUMP_P (insn))
2702 failed = true;
2704 if (failed)
2706 end_sequence ();
2707 emit_insn (insns);
2709 else
2711 rtx note = 0;
2712 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2714 /* Mark the return value as a pointer if needed. */
2715 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2716 mark_reg_pointer (temp,
2717 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2719 end_sequence ();
2720 if (flag_unsafe_math_optimizations
2721 && fndecl
2722 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2723 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2724 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2725 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2726 note = gen_rtx_fmt_e (SQRT,
2727 GET_MODE (temp),
2728 args[0].initial_value);
2729 else
2731 /* Construct an "equal form" for the value which
2732 mentions all the arguments in order as well as
2733 the function name. */
2734 for (i = 0; i < num_actuals; i++)
2735 note = gen_rtx_EXPR_LIST (VOIDmode,
2736 args[i].initial_value, note);
2737 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2739 if (flags & ECF_PURE)
2740 note = gen_rtx_EXPR_LIST (VOIDmode,
2741 gen_rtx_USE (VOIDmode,
2742 gen_rtx_MEM (BLKmode,
2743 gen_rtx_SCRATCH (VOIDmode))),
2744 note);
2746 emit_libcall_block (insns, temp, valreg, note);
2748 valreg = temp;
2751 else if (pass && (flags & ECF_MALLOC))
2753 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2754 rtx last, insns;
2756 /* The return value from a malloc-like function is a pointer. */
2757 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2758 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2760 emit_move_insn (temp, valreg);
2762 /* The return value from a malloc-like function can not alias
2763 anything else. */
2764 last = get_last_insn ();
2765 REG_NOTES (last) =
2766 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2768 /* Write out the sequence. */
2769 insns = get_insns ();
2770 end_sequence ();
2771 emit_insn (insns);
2772 valreg = temp;
2775 /* For calls to `setjmp', etc., inform flow.c it should complain
2776 if nonvolatile values are live. For functions that cannot return,
2777 inform flow that control does not fall through. */
2779 if ((flags & ECF_NORETURN) || pass == 0)
2781 /* The barrier must be emitted
2782 immediately after the CALL_INSN. Some ports emit more
2783 than just a CALL_INSN above, so we must search for it here. */
2785 rtx last = get_last_insn ();
2786 while (!CALL_P (last))
2788 last = PREV_INSN (last);
2789 /* There was no CALL_INSN? */
2790 gcc_assert (last != before_call);
2793 emit_barrier_after (last);
2795 /* Stack adjustments after a noreturn call are dead code.
2796 However when NO_DEFER_POP is in effect, we must preserve
2797 stack_pointer_delta. */
2798 if (inhibit_defer_pop == 0)
2800 stack_pointer_delta = old_stack_allocated;
2801 pending_stack_adjust = 0;
2805 /* If value type not void, return an rtx for the value. */
2807 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2808 || ignore)
2809 target = const0_rtx;
2810 else if (structure_value_addr)
2812 if (target == 0 || !MEM_P (target))
2814 target
2815 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2816 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2817 structure_value_addr));
2818 set_mem_attributes (target, exp, 1);
2821 else if (pcc_struct_value)
2823 /* This is the special C++ case where we need to
2824 know what the true target was. We take care to
2825 never use this value more than once in one expression. */
2826 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2827 copy_to_reg (valreg));
2828 set_mem_attributes (target, exp, 1);
2830 /* Handle calls that return values in multiple non-contiguous locations.
2831 The Irix 6 ABI has examples of this. */
2832 else if (GET_CODE (valreg) == PARALLEL)
2834 if (target == 0)
2836 /* This will only be assigned once, so it can be readonly. */
2837 tree nt = build_qualified_type (TREE_TYPE (exp),
2838 (TYPE_QUALS (TREE_TYPE (exp))
2839 | TYPE_QUAL_CONST));
2841 target = assign_temp (nt, 0, 1, 1);
2844 if (! rtx_equal_p (target, valreg))
2845 emit_group_store (target, valreg, TREE_TYPE (exp),
2846 int_size_in_bytes (TREE_TYPE (exp)));
2848 /* We can not support sibling calls for this case. */
2849 sibcall_failure = 1;
2851 else if (target
2852 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2853 && GET_MODE (target) == GET_MODE (valreg))
2855 /* TARGET and VALREG cannot be equal at this point because the
2856 latter would not have REG_FUNCTION_VALUE_P true, while the
2857 former would if it were referring to the same register.
2859 If they refer to the same register, this move will be a no-op,
2860 except when function inlining is being done. */
2861 emit_move_insn (target, valreg);
2863 /* If we are setting a MEM, this code must be executed. Since it is
2864 emitted after the call insn, sibcall optimization cannot be
2865 performed in that case. */
2866 if (MEM_P (target))
2867 sibcall_failure = 1;
2869 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2871 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2873 /* We can not support sibling calls for this case. */
2874 sibcall_failure = 1;
2876 else
2877 target = copy_to_reg (valreg);
2879 if (targetm.calls.promote_function_return(funtype))
2881 /* If we promoted this return value, make the proper SUBREG.
2882 TARGET might be const0_rtx here, so be careful. */
2883 if (REG_P (target)
2884 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2885 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2887 tree type = TREE_TYPE (exp);
2888 int unsignedp = TYPE_UNSIGNED (type);
2889 int offset = 0;
2890 enum machine_mode pmode;
2892 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
2893 /* If we don't promote as expected, something is wrong. */
2894 gcc_assert (GET_MODE (target) == pmode);
2896 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
2897 && (GET_MODE_SIZE (GET_MODE (target))
2898 > GET_MODE_SIZE (TYPE_MODE (type))))
2900 offset = GET_MODE_SIZE (GET_MODE (target))
2901 - GET_MODE_SIZE (TYPE_MODE (type));
2902 if (! BYTES_BIG_ENDIAN)
2903 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
2904 else if (! WORDS_BIG_ENDIAN)
2905 offset %= UNITS_PER_WORD;
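/* For instance, a QImode value promoted to an SImode register on a
   target that is both BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN gets
   OFFSET == 3, so the SUBREG below names the low-order byte of the
   promoted register.  */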
2907 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
2908 SUBREG_PROMOTED_VAR_P (target) = 1;
2909 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
2913 /* If size of args is variable or this was a constructor call for a stack
2914 argument, restore saved stack-pointer value. */
2916 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
2918 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2919 stack_pointer_delta = old_stack_pointer_delta;
2920 pending_stack_adjust = old_pending_adj;
2921 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2922 stack_arg_under_construction = old_stack_arg_under_construction;
2923 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2924 stack_usage_map = initial_stack_usage_map;
2925 sibcall_failure = 1;
2927 else if (ACCUMULATE_OUTGOING_ARGS && pass)
2929 #ifdef REG_PARM_STACK_SPACE
2930 if (save_area)
2931 restore_fixed_argument_area (save_area, argblock,
2932 high_to_save, low_to_save);
2933 #endif
2935 /* If we saved any argument areas, restore them. */
2936 for (i = 0; i < num_actuals; i++)
2937 if (args[i].save_area)
2939 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2940 rtx stack_area
2941 = gen_rtx_MEM (save_mode,
2942 memory_address (save_mode,
2943 XEXP (args[i].stack_slot, 0)));
2945 if (save_mode != BLKmode)
2946 emit_move_insn (stack_area, args[i].save_area);
2947 else
2948 emit_block_move (stack_area, args[i].save_area,
2949 GEN_INT (args[i].locate.size.constant),
2950 BLOCK_OP_CALL_PARM);
2953 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2954 stack_usage_map = initial_stack_usage_map;
2957 /* If this was alloca, record the new stack level for nonlocal gotos.
2958 Check for the handler slots since we might not have a save area
2959 for non-local gotos. */
2961 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
2962 update_nonlocal_goto_save_area ();
2964 /* Free up storage we no longer need. */
2965 for (i = 0; i < num_actuals; ++i)
2966 if (args[i].aligned_regs)
2967 free (args[i].aligned_regs);
2969 insns = get_insns ();
2970 end_sequence ();
2972 if (pass == 0)
2974 tail_call_insns = insns;
2976 /* Restore the pending stack adjustment now that we have
2977 finished generating the sibling call sequence. */
2979 pending_stack_adjust = save_pending_stack_adjust;
2980 stack_pointer_delta = save_stack_pointer_delta;
2982 /* Prepare arg structure for next iteration. */
2983 for (i = 0; i < num_actuals; i++)
2985 args[i].value = 0;
2986 args[i].aligned_regs = 0;
2987 args[i].stack = 0;
2990 sbitmap_free (stored_args_map);
2992 else
2994 normal_call_insns = insns;
2996 /* Verify that we've deallocated all the stack we used. */
2997 gcc_assert ((flags & ECF_NORETURN)
2998 || (old_stack_allocated
2999 == stack_pointer_delta - pending_stack_adjust));
3002 /* If something prevents making this a sibling call,
3003 zero out the sequence. */
3004 if (sibcall_failure)
3005 tail_call_insns = NULL_RTX;
3006 else
3007 break;
3010 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3011 arguments too, as argument area is now clobbered by the call. */
3012 if (tail_call_insns)
3014 emit_insn (tail_call_insns);
3015 cfun->tail_call_emit = true;
3017 else
3018 emit_insn (normal_call_insns);
3020 currently_expanding_call--;
3022 /* If this function returns with the stack pointer depressed, ensure
3023 this block saves and restores the stack pointer, show it was
3024 changed, and adjust for any outgoing arg space. */
3025 if (flags & ECF_SP_DEPRESSED)
3027 clear_pending_stack_adjust ();
3028 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3029 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3032 return target;
3035 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3036 this function's incoming arguments.
3038 At the start of RTL generation we know the only REG_EQUIV notes
3039 in the rtl chain are those for incoming arguments, so we can safely
3040 flush any REG_EQUIV note.
3042 This is (slight) overkill. We could keep track of the highest
3043 argument we clobber and be more selective in removing notes, but it
3044 does not seem to be worth the effort. */
3045 void
3046 fixup_tail_calls (void)
3048 purge_reg_equiv_notes ();
3051 /* Traverse an argument list in VALUES and expand all complex
3052 arguments into their components. */
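/* For example, when the target asks for complex arguments to be split,
   a single argument of type _Complex double is rewritten here as two
   consecutive arguments of type double: the real part followed by the
   imaginary part.  */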
3053 static tree
3054 split_complex_values (tree values)
3056 tree p;
3058 /* Before allocating memory, check for the common case of no complex. */
3059 for (p = values; p; p = TREE_CHAIN (p))
3061 tree type = TREE_TYPE (TREE_VALUE (p));
3062 if (type && TREE_CODE (type) == COMPLEX_TYPE
3063 && targetm.calls.split_complex_arg (type))
3064 goto found;
3066 return values;
3068 found:
3069 values = copy_list (values);
3071 for (p = values; p; p = TREE_CHAIN (p))
3073 tree complex_value = TREE_VALUE (p);
3074 tree complex_type;
3076 complex_type = TREE_TYPE (complex_value);
3077 if (!complex_type)
3078 continue;
3080 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3081 && targetm.calls.split_complex_arg (complex_type))
3083 tree subtype;
3084 tree real, imag, next;
3086 subtype = TREE_TYPE (complex_type);
3087 complex_value = save_expr (complex_value);
3088 real = build1 (REALPART_EXPR, subtype, complex_value);
3089 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3091 TREE_VALUE (p) = real;
3092 next = TREE_CHAIN (p);
3093 imag = build_tree_list (NULL_TREE, imag);
3094 TREE_CHAIN (p) = imag;
3095 TREE_CHAIN (imag) = next;
3097 /* Skip the newly created node. */
3098 p = TREE_CHAIN (p);
3102 return values;
3105 /* Traverse a list of TYPES and expand all complex types into their
3106 components. */
3107 static tree
3108 split_complex_types (tree types)
3110 tree p;
3112 /* Before allocating memory, check for the common case of no complex. */
3113 for (p = types; p; p = TREE_CHAIN (p))
3115 tree type = TREE_VALUE (p);
3116 if (TREE_CODE (type) == COMPLEX_TYPE
3117 && targetm.calls.split_complex_arg (type))
3118 goto found;
3120 return types;
3122 found:
3123 types = copy_list (types);
3125 for (p = types; p; p = TREE_CHAIN (p))
3127 tree complex_type = TREE_VALUE (p);
3129 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3130 && targetm.calls.split_complex_arg (complex_type))
3132 tree next, imag;
3134 /* Rewrite complex type with component type. */
3135 TREE_VALUE (p) = TREE_TYPE (complex_type);
3136 next = TREE_CHAIN (p);
3138 /* Add another component type for the imaginary part. */
3139 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3140 TREE_CHAIN (p) = imag;
3141 TREE_CHAIN (imag) = next;
3143 /* Skip the newly created node. */
3144 p = TREE_CHAIN (p);
3148 return types;
3151 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3152 The RETVAL parameter specifies whether the return value needs to be saved;
3153 the other parameters are documented in the emit_library_call function below. */
3155 static rtx
3156 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3157 enum libcall_type fn_type,
3158 enum machine_mode outmode, int nargs, va_list p)
3160 /* Total size in bytes of all the stack-parms scanned so far. */
3161 struct args_size args_size;
3162 /* Size of arguments before any adjustments (such as rounding). */
3163 struct args_size original_args_size;
3164 int argnum;
3165 rtx fun;
3166 int inc;
3167 int count;
3168 rtx argblock = 0;
3169 CUMULATIVE_ARGS args_so_far;
3170 struct arg
3172 rtx value;
3173 enum machine_mode mode;
3174 rtx reg;
3175 int partial;
3176 struct locate_and_pad_arg_data locate;
3177 rtx save_area;
3179 struct arg *argvec;
3180 int old_inhibit_defer_pop = inhibit_defer_pop;
3181 rtx call_fusage = 0;
3182 rtx mem_value = 0;
3183 rtx valreg;
3184 int pcc_struct_value = 0;
3185 int struct_value_size = 0;
3186 int flags;
3187 int reg_parm_stack_space = 0;
3188 int needed;
3189 rtx before_call;
3190 tree tfom; /* type_for_mode (outmode, 0) */
3192 #ifdef REG_PARM_STACK_SPACE
3193 /* Define the boundary of the register parm stack space that needs to be
3194 saved, if any. */
3195 int low_to_save, high_to_save;
3196 rtx save_area = 0; /* Place that it is saved. */
3197 #endif
3199 /* Initial stack usage information, saved so it can be restored after the call. */
3200 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3201 char *initial_stack_usage_map = stack_usage_map;
3203 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3205 #ifdef REG_PARM_STACK_SPACE
3206 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3207 #endif
3209 /* By default, library functions can not throw. */
3210 flags = ECF_NOTHROW;
3212 switch (fn_type)
3214 case LCT_NORMAL:
3215 break;
3216 case LCT_CONST:
3217 flags |= ECF_CONST;
3218 break;
3219 case LCT_PURE:
3220 flags |= ECF_PURE;
3221 break;
3222 case LCT_CONST_MAKE_BLOCK:
3223 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3224 break;
3225 case LCT_PURE_MAKE_BLOCK:
3226 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3227 break;
3228 case LCT_NORETURN:
3229 flags |= ECF_NORETURN;
3230 break;
3231 case LCT_THROW:
3232 flags = ECF_NORETURN;
3233 break;
3234 case LCT_RETURNS_TWICE:
3235 flags = ECF_RETURNS_TWICE;
3236 break;
3238 fun = orgfun;
3240 /* Ensure current function's preferred stack boundary is at least
3241 what we need. */
3242 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3243 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3245 /* If this kind of value comes back in memory,
3246 decide where in memory it should come back. */
3247 if (outmode != VOIDmode)
3249 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3250 if (aggregate_value_p (tfom, 0))
3252 #ifdef PCC_STATIC_STRUCT_RETURN
3253 rtx pointer_reg
3254 = hard_function_value (build_pointer_type (tfom), 0, 0);
3255 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3256 pcc_struct_value = 1;
3257 if (value == 0)
3258 value = gen_reg_rtx (outmode);
3259 #else /* not PCC_STATIC_STRUCT_RETURN */
3260 struct_value_size = GET_MODE_SIZE (outmode);
3261 if (value != 0 && MEM_P (value))
3262 mem_value = value;
3263 else
3264 mem_value = assign_temp (tfom, 0, 1, 1);
3265 #endif
3266 /* This call returns a big structure. */
3267 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3270 else
3271 tfom = void_type_node;
3273 /* ??? Unfinished: must pass the memory address as an argument. */
3275 /* Copy all the libcall-arguments out of the varargs data
3276 and into a vector ARGVEC.
3278 Compute how to pass each argument. We only support a very small subset
3279 of the full argument passing conventions to limit complexity here since
3280 library functions shouldn't have many args. */
3282 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3283 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3285 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3286 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3287 #else
3288 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3289 #endif
3291 args_size.constant = 0;
3292 args_size.var = 0;
3294 count = 0;
3296 /* Now we are about to start emitting insns that can be deleted
3297 if a libcall is deleted. */
3298 if (flags & ECF_LIBCALL_BLOCK)
3299 start_sequence ();
3301 push_temp_slots ();
3303 /* If there's a structure value address to be passed,
3304 either pass it in the special place, or pass it as an extra argument. */
3305 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3307 rtx addr = XEXP (mem_value, 0);
3309 nargs++;
3311 /* Make sure it is a reasonable operand for a move or push insn. */
3312 if (!REG_P (addr) && !MEM_P (addr)
3313 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3314 addr = force_operand (addr, NULL_RTX);
3316 argvec[count].value = addr;
3317 argvec[count].mode = Pmode;
3318 argvec[count].partial = 0;
3320 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3321 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3322 NULL_TREE, 1) == 0);
3324 locate_and_pad_parm (Pmode, NULL_TREE,
3325 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3326 1,
3327 #else
3328 argvec[count].reg != 0,
3329 #endif
3330 0, NULL_TREE, &args_size, &argvec[count].locate);
3332 if (argvec[count].reg == 0 || argvec[count].partial != 0
3333 || reg_parm_stack_space > 0)
3334 args_size.constant += argvec[count].locate.size.constant;
3336 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3338 count++;
3341 for (; count < nargs; count++)
3343 rtx val = va_arg (p, rtx);
3344 enum machine_mode mode = va_arg (p, enum machine_mode);
3346 /* We cannot convert the arg value to the mode the library wants here;
3347 must do it earlier where we know the signedness of the arg. */
3348 gcc_assert (mode != BLKmode
3349 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3351 /* Make sure it is a reasonable operand for a move or push insn. */
3352 if (!REG_P (val) && !MEM_P (val)
3353 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3354 val = force_operand (val, NULL_RTX);
3356 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3358 rtx slot;
3359 int must_copy
3360 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3362 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3363 functions, so we have to pretend this isn't such a function. */
3364 if (flags & ECF_LIBCALL_BLOCK)
3366 rtx insns = get_insns ();
3367 end_sequence ();
3368 emit_insn (insns);
3370 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3372 /* If this was a CONST function, it is now PURE since
3373 it now reads memory. */
3374 if (flags & ECF_CONST)
3376 flags &= ~ECF_CONST;
3377 flags |= ECF_PURE;
3380 if (MEM_P (val) && !must_copy)
3381 slot = val;
3382 else
3384 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3385 0, 1, 1);
3386 emit_move_insn (slot, val);
3389 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3390 gen_rtx_USE (VOIDmode, slot),
3391 call_fusage);
3392 if (must_copy)
3393 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3394 gen_rtx_CLOBBER (VOIDmode,
3395 slot),
3396 call_fusage);
3398 mode = Pmode;
3399 val = force_operand (XEXP (slot, 0), NULL_RTX);
3402 argvec[count].value = val;
3403 argvec[count].mode = mode;
3405 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3407 argvec[count].partial
3408 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3410 locate_and_pad_parm (mode, NULL_TREE,
3411 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3412 1,
3413 #else
3414 argvec[count].reg != 0,
3415 #endif
3416 argvec[count].partial,
3417 NULL_TREE, &args_size, &argvec[count].locate);
3419 gcc_assert (!argvec[count].locate.size.var);
3421 if (argvec[count].reg == 0 || argvec[count].partial != 0
3422 || reg_parm_stack_space > 0)
3423 args_size.constant += argvec[count].locate.size.constant;
3425 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3428 /* If this machine requires an external definition for library
3429 functions, write one out. */
3430 assemble_external_libcall (fun);
3432 original_args_size = args_size;
3433 args_size.constant = (((args_size.constant
3434 + stack_pointer_delta
3435 + STACK_BYTES - 1)
3436 / STACK_BYTES
3437 * STACK_BYTES)
3438 - stack_pointer_delta);
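/* E.g. with 16-byte STACK_BYTES, a stack_pointer_delta of 4 and 20 bytes
   of arguments, this rounds ARGS_SIZE.CONSTANT up to 28, so that the
   total of 4 + 28 = 32 remains a multiple of the boundary.  */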
3440 args_size.constant = MAX (args_size.constant,
3441 reg_parm_stack_space);
3443 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3444 args_size.constant -= reg_parm_stack_space;
3445 #endif
3447 if (args_size.constant > current_function_outgoing_args_size)
3448 current_function_outgoing_args_size = args_size.constant;
3450 if (ACCUMULATE_OUTGOING_ARGS)
3452 /* Since the stack pointer will never be pushed, it is possible for
3453 the evaluation of a parm to clobber something we have already
3454 written to the stack. Since most function calls on RISC machines
3455 do not use the stack, this is uncommon, but must work correctly.
3457 Therefore, we save any area of the stack that was already written
3458 and that we are using. Here we set up to do this by making a new
3459 stack usage map from the old one.
3461 Another approach might be to try to reorder the argument
3462 evaluations to avoid this conflicting stack usage. */
3464 needed = args_size.constant;
3466 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3467 /* Since we will be writing into the entire argument area, the
3468 map must be allocated for its entire size, not just the part that
3469 is the responsibility of the caller. */
3470 needed += reg_parm_stack_space;
3471 #endif
3473 #ifdef ARGS_GROW_DOWNWARD
3474 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3475 needed + 1);
3476 #else
3477 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3478 needed);
3479 #endif
3480 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3482 if (initial_highest_arg_in_use)
3483 memcpy (stack_usage_map, initial_stack_usage_map,
3484 initial_highest_arg_in_use);
3486 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3487 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3488 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3489 needed = 0;
3491 /* We must be careful to use virtual regs before they're instantiated,
3492 and real regs afterwards. Loop optimization, for example, can create
3493 new libcalls after we've instantiated the virtual regs, and if we
3494 use virtuals anyway, they won't match the rtl patterns. */
3496 if (virtuals_instantiated)
3497 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3498 else
3499 argblock = virtual_outgoing_args_rtx;
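  /* virtual_outgoing_args_rtx is normally instantiated later as
     stack_pointer_rtx plus STACK_POINTER_OFFSET, so both branches name the
     base of the outgoing argument area.  */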
3501 else
3503 if (!PUSH_ARGS)
3504 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3507 /* If we push args individually in reverse order, perform stack alignment
3508 before the first push (the last arg). */
3509 if (argblock == 0 && PUSH_ARGS_REVERSED)
3510 anti_adjust_stack (GEN_INT (args_size.constant
3511 - original_args_size.constant));
3513 if (PUSH_ARGS_REVERSED)
3515 inc = -1;
3516 argnum = nargs - 1;
3518 else
3520 inc = 1;
3521 argnum = 0;
3524 #ifdef REG_PARM_STACK_SPACE
3525 if (ACCUMULATE_OUTGOING_ARGS)
3527 /* The argument list is the property of the called routine and it
3528 may clobber it. If the fixed area has been used for previous
3529 parameters, we must save and restore it. */
3530 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3531 &low_to_save, &high_to_save);
3533 #endif
3535 /* Push the args that need to be pushed. */
3537 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3538 are to be pushed. */
3539 for (count = 0; count < nargs; count++, argnum += inc)
3541 enum machine_mode mode = argvec[argnum].mode;
3542 rtx val = argvec[argnum].value;
3543 rtx reg = argvec[argnum].reg;
3544 int partial = argvec[argnum].partial;
3545 int lower_bound = 0, upper_bound = 0, i;
3547 if (! (reg != 0 && partial == 0))
3549 if (ACCUMULATE_OUTGOING_ARGS)
3551 /* If this is being stored into a pre-allocated, fixed-size,
3552 stack area, save any previous data at that location. */
3554 #ifdef ARGS_GROW_DOWNWARD
3555 /* stack_slot is negative, but we want to index stack_usage_map
3556 with positive values. */
3557 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3558 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3559 #else
3560 lower_bound = argvec[argnum].locate.offset.constant;
3561 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3562 #endif
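  /* A hypothetical example: under ARGS_GROW_DOWNWARD, with
     locate.offset.constant == -16 and locate.size.constant == 8,
     upper_bound is 17 and lower_bound is 9, so stack_usage_map[9..16]
     covers the slot; in the upward case with offset 16 and size 8 the
     range is [16..23].  */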
3564 i = lower_bound;
3565 /* Don't worry about things in the fixed argument area;
3566 it has already been saved. */
3567 if (i < reg_parm_stack_space)
3568 i = reg_parm_stack_space;
3569 while (i < upper_bound && stack_usage_map[i] == 0)
3570 i++;
3572 if (i < upper_bound)
3574 /* We need to make a save area. */
3575 unsigned int size
3576 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3577 enum machine_mode save_mode
3578 = mode_for_size (size, MODE_INT, 1);
3579 rtx adr
3580 = plus_constant (argblock,
3581 argvec[argnum].locate.offset.constant);
3582 rtx stack_area
3583 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
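  /* mode_for_size yields an integer mode of exactly SIZE bits if the
     target has one (and SIZE is within MAX_FIXED_MODE_SIZE); otherwise
     it returns BLKmode, which is why the branch below falls back to a
     block copy into a stack temporary instead of a pseudo.  */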
3585 if (save_mode == BLKmode)
3587 argvec[argnum].save_area
3588 = assign_stack_temp (BLKmode,
3589 argvec[argnum].locate.size.constant,
3590 1);
3592 emit_block_move (validize_mem (argvec[argnum].save_area),
3593 stack_area,
3594 GEN_INT (argvec[argnum].locate.size.constant),
3595 BLOCK_OP_CALL_PARM);
3597 else
3599 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3601 emit_move_insn (argvec[argnum].save_area, stack_area);
3606 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3607 partial, reg, 0, argblock,
3608 GEN_INT (argvec[argnum].locate.offset.constant),
3609 reg_parm_stack_space,
3610 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3612 /* Now mark the segment we just used. */
3613 if (ACCUMULATE_OUTGOING_ARGS)
3614 for (i = lower_bound; i < upper_bound; i++)
3615 stack_usage_map[i] = 1;
3617 NO_DEFER_POP;
3621 /* If we pushed args in forward order, perform stack alignment
3622 after pushing the last arg. */
3623 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3624 anti_adjust_stack (GEN_INT (args_size.constant
3625 - original_args_size.constant));
3627 if (PUSH_ARGS_REVERSED)
3628 argnum = nargs - 1;
3629 else
3630 argnum = 0;
3632 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3634 /* Now load any reg parms into their regs. */
3636 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3637 are to be pushed. */
3638 for (count = 0; count < nargs; count++, argnum += inc)
3640 enum machine_mode mode = argvec[argnum].mode;
3641 rtx val = argvec[argnum].value;
3642 rtx reg = argvec[argnum].reg;
3643 int partial = argvec[argnum].partial;
3645 /* Handle calls that pass values in multiple non-contiguous
3646 locations. The PA64 has examples of this for library calls. */
3647 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3648 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3649 else if (reg != 0 && partial == 0)
3650 emit_move_insn (reg, val);
3652 NO_DEFER_POP;
3655 /* Any regs containing parms remain in use through the call. */
3656 for (count = 0; count < nargs; count++)
3658 rtx reg = argvec[count].reg;
3659 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3660 use_group_regs (&call_fusage, reg);
3661 else if (reg != 0)
3662 use_reg (&call_fusage, reg);
3665 /* Pass the function the address in which to return a structure value. */
3666 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3668 emit_move_insn (struct_value,
3669 force_reg (Pmode,
3670 force_operand (XEXP (mem_value, 0),
3671 NULL_RTX)));
3672 if (REG_P (struct_value))
3673 use_reg (&call_fusage, struct_value);
3676 /* Don't allow popping to be deferred, since then
3677 cse'ing of library calls could delete a call and leave the pop. */
3678 NO_DEFER_POP;
3679 valreg = (mem_value == 0 && outmode != VOIDmode
3680 ? hard_libcall_value (outmode) : NULL_RTX);
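  /* hard_libcall_value gives the hard register in which the target returns
     a value of mode OUTMODE; when the result comes back in MEM_VALUE there
     is no such register to track here.  */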
3682 /* Stack must be properly aligned now. */
3683 gcc_assert (!(stack_pointer_delta
3684 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3686 before_call = get_last_insn ();
3688 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3689 will set inhibit_defer_pop to that value. */
3690 /* The return type is needed to decide how many bytes the function pops.
3691 Signedness plays no role in that, so for simplicity, we pretend it's
3692 always signed. We also assume that the list of arguments passed has
3693 no impact, so we pretend it is unknown. */
3695 emit_call_1 (fun, NULL,
3696 get_identifier (XSTR (orgfun, 0)),
3697 build_function_type (tfom, NULL_TREE),
3698 original_args_size.constant, args_size.constant,
3699 struct_value_size,
3700 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3701 valreg,
3702 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3704 /* For calls to `setjmp', etc., inform flow.c it should complain
3705 if nonvolatile values are live. For functions that cannot return,
3706 inform flow that control does not fall through. */
3708 if (flags & ECF_NORETURN)
3710 /* The barrier note must be emitted
3711 immediately after the CALL_INSN. Some ports emit more than
3712 just a CALL_INSN above, so we must search for it here. */
3714 rtx last = get_last_insn ();
3715 while (!CALL_P (last))
3717 last = PREV_INSN (last);
3718 /* There was no CALL_INSN? */
3719 gcc_assert (last != before_call);
3722 emit_barrier_after (last);
3725 /* Now restore inhibit_defer_pop to its actual original value. */
3726 OK_DEFER_POP;
3728 /* If call is cse'able, make appropriate pair of reg-notes around it.
3729 Test valreg so we don't crash; may safely ignore `const'
3730 if return type is void. A PARALLEL return value is first copied into
3731 a pseudo register, since emit_libcall_block needs a single register. */
3732 if (flags & ECF_LIBCALL_BLOCK)
3734 rtx insns;
3736 if (valreg == 0)
3738 insns = get_insns ();
3739 end_sequence ();
3740 emit_insn (insns);
3742 else
3744 rtx note = 0;
3745 rtx temp;
3746 int i;
3748 if (GET_CODE (valreg) == PARALLEL)
3750 temp = gen_reg_rtx (outmode);
3751 emit_group_store (temp, valreg, NULL_TREE,
3752 GET_MODE_SIZE (outmode));
3753 valreg = temp;
3756 temp = gen_reg_rtx (GET_MODE (valreg));
3758 /* Construct an "equal form" for the value which mentions all the
3759 arguments in order as well as the function name. */
3760 for (i = 0; i < nargs; i++)
3761 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3762 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3764 insns = get_insns ();
3765 end_sequence ();
3767 if (flags & ECF_PURE)
3768 note = gen_rtx_EXPR_LIST (VOIDmode,
3769 gen_rtx_USE (VOIDmode,
3770 gen_rtx_MEM (BLKmode,
3771 gen_rtx_SCRATCH (VOIDmode))),
3772 note);
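  /* For a `pure' call the (use (mem (scratch))) added to the note records
     that the result may depend on arbitrary memory, so the equivalence
     must not be used to move the call across intervening stores.  */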
3774 emit_libcall_block (insns, temp, valreg, note);
3776 valreg = temp;
3779 pop_temp_slots ();
3781 /* Copy the value to the right place. */
3782 if (outmode != VOIDmode && retval)
3784 if (mem_value)
3786 if (value == 0)
3787 value = mem_value;
3788 if (value != mem_value)
3789 emit_move_insn (value, mem_value);
3791 else if (GET_CODE (valreg) == PARALLEL)
3793 if (value == 0)
3794 value = gen_reg_rtx (outmode);
3795 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3797 else if (value != 0)
3798 emit_move_insn (value, valreg);
3799 else
3800 value = valreg;
3803 if (ACCUMULATE_OUTGOING_ARGS)
3805 #ifdef REG_PARM_STACK_SPACE
3806 if (save_area)
3807 restore_fixed_argument_area (save_area, argblock,
3808 high_to_save, low_to_save);
3809 #endif
3811 /* If we saved any argument areas, restore them. */
3812 for (count = 0; count < nargs; count++)
3813 if (argvec[count].save_area)
3815 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3816 rtx adr = plus_constant (argblock,
3817 argvec[count].locate.offset.constant);
3818 rtx stack_area = gen_rtx_MEM (save_mode,
3819 memory_address (save_mode, adr));
3821 if (save_mode == BLKmode)
3822 emit_block_move (stack_area,
3823 validize_mem (argvec[count].save_area),
3824 GEN_INT (argvec[count].locate.size.constant),
3825 BLOCK_OP_CALL_PARM);
3826 else
3827 emit_move_insn (stack_area, argvec[count].save_area);
3830 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3831 stack_usage_map = initial_stack_usage_map;
3834 return value;
3838 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
3840 for a value of mode OUTMODE,
3841 with NARGS different arguments, passed as alternating rtx values
3842 and machine_modes to convert them to.
3844 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
3845 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
3846 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
3847 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
3848 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
3849 or other LCT_ value for other types of library calls. */
3851 void
3852 emit_library_call (rtx orgfun, enum libcall_type fn_type,
3853 enum machine_mode outmode, int nargs, ...)
3855 va_list p;
3857 va_start (p, nargs);
3858 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
3859 va_end (p);
3862 /* Like emit_library_call except that an extra argument, VALUE,
3863 comes second and says where to store the result.
3864 (If VALUE is zero, this function chooses a convenient way
3865 to return the value.)
3867 This function returns an rtx for where the value is to be found.
3868 If VALUE is nonzero, VALUE is returned. */
3870 rtx
3871 emit_library_call_value (rtx orgfun, rtx value,
3872 enum libcall_type fn_type,
3873 enum machine_mode outmode, int nargs, ...)
3875 rtx result;
3876 va_list p;
3878 va_start (p, nargs);
3879 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
3880 nargs, p);
3881 va_end (p);
3883 return result;
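/* A purely hypothetical use, with FUN a SYMBOL_REF for the routine and
   OP0/OP1 SImode operands already expanded, might look like:

     result = emit_library_call_value (fun, target, LCT_CONST, SImode,
                                       2, op0, SImode, op1, SImode);

   The trailing pairs after NARGS alternate value and mode as described
   above.  */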
3886 /* Store a single argument for a function call
3887 into the register or memory area where it must be passed.
3888 *ARG describes the argument value and where to pass it.
3890 ARGBLOCK is the address of the stack-block for all the arguments,
3891 or 0 on a machine where arguments are pushed individually.
3893 FLAGS is a bit mask of ECF_* flags for the call; if ECF_MAY_BE_ALLOCA
3894 is set, this could be a call to `alloca', so we must be careful about
3895 how the stack is used.
3896 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3897 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
3898 that we need not worry about saving and restoring the stack.
3900 REG_PARM_STACK_SPACE is the size in bytes of the stack area reserved
3901 for arguments that are also passed in registers.
3902 Return nonzero if this arg should cause sibcall failure,
3903 zero otherwise. */
3905 static int
3906 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
3907 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
3909 tree pval = arg->tree_value;
3910 rtx reg = 0;
3911 int partial = 0;
3912 int used = 0;
3913 int i, lower_bound = 0, upper_bound = 0;
3914 int sibcall_failure = 0;
3916 if (TREE_CODE (pval) == ERROR_MARK)
3917 return 1;
3919 /* Push a new temporary level for any temporaries we make for
3920 this argument. */
3921 push_temp_slots ();
3923 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
3925 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3926 save any previous data at that location. */
3927 if (argblock && ! variable_size && arg->stack)
3929 #ifdef ARGS_GROW_DOWNWARD
3930 /* stack_slot is negative, but we want to index stack_usage_map
3931 with positive values. */
3932 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3933 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3934 else
3935 upper_bound = 0;
3937 lower_bound = upper_bound - arg->locate.size.constant;
3938 #else
3939 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3940 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3941 else
3942 lower_bound = 0;
3944 upper_bound = lower_bound + arg->locate.size.constant;
3945 #endif
3947 i = lower_bound;
3948 /* Don't worry about things in the fixed argument area;
3949 it has already been saved. */
3950 if (i < reg_parm_stack_space)
3951 i = reg_parm_stack_space;
3952 while (i < upper_bound && stack_usage_map[i] == 0)
3953 i++;
3955 if (i < upper_bound)
3957 /* We need to make a save area. */
3958 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
3959 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
3960 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
3961 rtx stack_area = gen_rtx_MEM (save_mode, adr);
3963 if (save_mode == BLKmode)
3965 tree ot = TREE_TYPE (arg->tree_value);
3966 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
3967 | TYPE_QUAL_CONST));
3969 arg->save_area = assign_temp (nt, 0, 1, 1);
3970 preserve_temp_slots (arg->save_area);
3971 emit_block_move (validize_mem (arg->save_area), stack_area,
3972 expr_size (arg->tree_value),
3973 BLOCK_OP_CALL_PARM);
3975 else
3977 arg->save_area = gen_reg_rtx (save_mode);
3978 emit_move_insn (arg->save_area, stack_area);
3984 /* If this isn't going to be placed on both the stack and in registers,
3985 set up the register and number of words. */
3986 if (! arg->pass_on_stack)
3988 if (flags & ECF_SIBCALL)
3989 reg = arg->tail_call_reg;
3990 else
3991 reg = arg->reg;
3992 partial = arg->partial;
3995 /* Being passed entirely in a register. We shouldn't be called in
3996 this case. */
3997 gcc_assert (reg == 0 || partial != 0);
3999 /* If this arg needs special alignment, don't load the registers
4000 here. */
4001 if (arg->n_aligned_regs != 0)
4002 reg = 0;
4004 /* If this is being passed partially in a register, we can't evaluate
4005 it directly into its stack slot. Otherwise, we can. */
4006 if (arg->value == 0)
4008 /* stack_arg_under_construction is nonzero if a function argument is
4009 being evaluated directly into the outgoing argument list and
4010 expand_call must take special action to preserve the argument list
4011 if it is called recursively.
4013 For scalar function arguments stack_usage_map is sufficient to
4014 determine which stack slots must be saved and restored. Scalar
4015 arguments in general have pass_on_stack == 0.
4017 If this argument is initialized by a function which takes the
4018 address of the argument (a C++ constructor or a C function
4019 returning a BLKmode structure), then stack_usage_map is
4020 insufficient and expand_call must push the stack around the
4021 function call. Such arguments have pass_on_stack == 1.
4023 Note that it is always safe to set stack_arg_under_construction,
4024 but this generates suboptimal code if set when not needed. */
4026 if (arg->pass_on_stack)
4027 stack_arg_under_construction++;
4029 arg->value = expand_expr (pval,
4030 (partial
4031 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4032 ? NULL_RTX : arg->stack,
4033 VOIDmode, EXPAND_STACK_PARM);
4035 /* If the mode doesn't agree, whether because we are promoting the
4036 object or for any other reason, convert to the right mode. */
4038 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4039 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4040 arg->value, arg->unsignedp);
4042 if (arg->pass_on_stack)
4043 stack_arg_under_construction--;
4046 /* Don't allow anything left on stack from computation
4047 of argument to alloca. */
4048 if (flags & ECF_MAY_BE_ALLOCA)
4049 do_pending_stack_adjust ();
4051 if (arg->value == arg->stack)
4052 /* If the value is already in the stack slot, we are done. */
4054 else if (arg->mode != BLKmode)
4056 int size;
4058 /* Argument is a scalar, not entirely passed in registers.
4059 (If part is passed in registers, arg->partial says how much
4060 and emit_push_insn will take care of putting it there.)
4062 Push it, and if its size is less than the
4063 amount of space allocated to it,
4064 also bump stack pointer by the additional space.
4065 Note that in C the default argument promotions
4066 will prevent such mismatches. */
4068 size = GET_MODE_SIZE (arg->mode);
4069 /* Compute how much space the push instruction will push.
4070 On many machines, pushing a byte will advance the stack
4071 pointer by a halfword. */
4072 #ifdef PUSH_ROUNDING
4073 size = PUSH_ROUNDING (size);
4074 #endif
4075 used = size;
4077 /* Compute how much space the argument should get:
4078 round up to a multiple of the alignment for arguments. */
4079 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4080 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4081 / (PARM_BOUNDARY / BITS_PER_UNIT))
4082 * (PARM_BOUNDARY / BITS_PER_UNIT));
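  /* A hypothetical example of the rounding: if PARM_BOUNDARY is 32 bits
     (4 bytes) and the pushed scalar occupies 2 bytes, USED becomes 4, and
     emit_push_insn receives the remaining 2 bytes as USED - SIZE so the
     slot is still padded out to the full 4.  */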
4084 /* This isn't already where we want it on the stack, so put it there.
4085 This can either be done with push or copy insns. */
4086 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4087 PARM_BOUNDARY, partial, reg, used - size, argblock,
4088 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4089 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4091 /* Unless this is a partially-in-register argument, the argument is now
4092 in the stack. */
4093 if (partial == 0)
4094 arg->value = arg->stack;
4096 else
4098 /* BLKmode, at least partly to be pushed. */
4100 unsigned int parm_align;
4101 int excess;
4102 rtx size_rtx;
4104 /* Pushing a nonscalar.
4105 If part is passed in registers, PARTIAL says how much
4106 and emit_push_insn will take care of putting it there. */
4108 /* Round its size up to a multiple
4109 of the allocation unit for arguments. */
4111 if (arg->locate.size.var != 0)
4113 excess = 0;
4114 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4116 else
4118 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4119 for BLKmode is careful to avoid it. */
4120 excess = (arg->locate.size.constant
4121 - int_size_in_bytes (TREE_TYPE (pval))
4122 + partial);
4123 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4124 NULL_RTX, TYPE_MODE (sizetype), 0);
4127 parm_align = arg->locate.boundary;
4129 /* When an argument is padded down, the block is aligned to
4130 PARM_BOUNDARY, but the actual argument isn't. */
4131 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4133 if (arg->locate.size.var)
4134 parm_align = BITS_PER_UNIT;
4135 else if (excess)
4137 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4138 parm_align = MIN (parm_align, excess_align);
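  /* EXCESS & -EXCESS isolates the lowest set bit of EXCESS, i.e. the
     largest power of two that divides it; e.g. an excess of 6 bytes gives
     excess_align == 16 bits, capping PARM_ALIGN accordingly.  */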
4142 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4144 /* emit_push_insn might not work properly if arg->value and
4145 argblock + arg->locate.offset areas overlap. */
4146 rtx x = arg->value;
4147 int i = 0;
4149 if (XEXP (x, 0) == current_function_internal_arg_pointer
4150 || (GET_CODE (XEXP (x, 0)) == PLUS
4151 && XEXP (XEXP (x, 0), 0) ==
4152 current_function_internal_arg_pointer
4153 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4155 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4156 i = INTVAL (XEXP (XEXP (x, 0), 1));
4158 /* expand_call should ensure this. */
4159 gcc_assert (!arg->locate.offset.var
4160 && GET_CODE (size_rtx) == CONST_INT);
4162 if (arg->locate.offset.constant > i)
4164 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4165 sibcall_failure = 1;
4167 else if (arg->locate.offset.constant < i)
4169 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4170 sibcall_failure = 1;
4175 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4176 parm_align, partial, reg, excess, argblock,
4177 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4178 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4180 /* Unless this is a partially-in-register argument, the argument is now
4181 in the stack.
4183 ??? Unlike the case above, in which we want the actual
4184 address of the data, so that we can load it directly into a
4185 register, here we want the address of the stack slot, so that
4186 it's properly aligned for word-by-word copying or something
4187 like that. It's not clear that this is always correct. */
4188 if (partial == 0)
4189 arg->value = arg->stack_slot;
4192 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4194 tree type = TREE_TYPE (arg->tree_value);
4195 arg->parallel_value
4196 = emit_group_load_into_temps (arg->reg, arg->value, type,
4197 int_size_in_bytes (type));
4200 /* Mark all slots this store used. */
4201 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4202 && argblock && ! variable_size && arg->stack)
4203 for (i = lower_bound; i < upper_bound; i++)
4204 stack_usage_map[i] = 1;
4206 /* Once we have pushed something, pops can't safely
4207 be deferred during the rest of the arguments. */
4208 NO_DEFER_POP;
4210 /* Free any temporary slots made in processing this argument. Show
4211 that we might have taken the address of something and pushed that
4212 as an operand. */
4213 preserve_temp_slots (NULL_RTX);
4214 free_temp_slots ();
4215 pop_temp_slots ();
4217 return sibcall_failure;
4220 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4222 bool
4223 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4224 tree type)
4226 if (!type)
4227 return false;
4229 /* If the type has variable size... */
4230 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4231 return true;
4233 /* If the type is marked as addressable (it is required
4234 to be constructed into the stack)... */
4235 if (TREE_ADDRESSABLE (type))
4236 return true;
4238 return false;
4241 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4242 takes trailing padding of a structure into account. */
4243 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4245 bool
4246 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
4248 if (!type)
4249 return false;
4251 /* If the type has variable size... */
4252 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4253 return true;
4255 /* If the type is marked as addressable (it is required
4256 to be constructed into the stack)... */
4257 if (TREE_ADDRESSABLE (type))
4258 return true;
4260 /* If the padding and mode of the type are such that a copy into
4261 a register would put it into the wrong part of the register. */
4262 if (mode == BLKmode
4263 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4264 && (FUNCTION_ARG_PADDING (mode, type)
4265 == (BYTES_BIG_ENDIAN ? upward : downward)))
4266 return true;
4268 return false;