gcc/calls.c
/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
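/* For instance (illustrative values only): a target with
   PREFERRED_STACK_BOUNDARY of 64 and BITS_PER_UNIT of 8 gets
   STACK_BYTES of 64 / 8 = 8, so argument blocks are rounded to
   multiples of 8 bytes.  */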
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;
/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use the parent's incoming
   argument slots when they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;
/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
                         HOST_WIDE_INT, rtx, rtx, int, rtx, int,
                         CUMULATIVE_ARGS *);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
                                      struct args_size *);
static void precompute_arguments (int, int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, int);
static void initialize_argument_information (int, struct arg_data *,
                                             struct args_size *, int, tree,
                                             tree, CUMULATIVE_ARGS *, int,
                                             rtx *, int *, int *, int *,
                                             bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
                                      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
                                      enum machine_mode, int, va_list);
static int special_function_p (tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
                                                      unsigned int);
static tree split_complex_values (tree);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
                      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
              ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
              : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
        funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      static_chain_value = convert_memory_address (Pmode, static_chain_value);
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (REG_P (static_chain_rtx))
        use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */
static void
emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
             tree funtype ATTRIBUTE_UNUSED,
             HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
             HOST_WIDE_INT rounded_stack_size,
             HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
             rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
             int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
             CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif
  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_SIBCALL_VALUE_POP (valreg,
                                     gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     n_pop);
      else
        pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                               rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
         if possible, for the sake of frame pointer elimination.  */

      if (valreg)
        pat = GEN_CALL_VALUE_POP (valreg,
                                  gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
        pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
                            rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif
#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
        emit_call_insn (GEN_SIBCALL_VALUE (valreg,
                                           gen_rtx_MEM (FUNCTION_MODE, funexp),
                                           rounded_stack_size_rtx,
                                           next_arg_reg, NULL_RTX));
      else
        emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                     rounded_stack_size_rtx, next_arg_reg,
                                     struct_value_size_rtx));
    }
  else
#endif
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
        emit_call_insn (GEN_CALL_VALUE (valreg,
                                        gen_rtx_MEM (FUNCTION_MODE, funexp),
                                        rounded_stack_size_rtx, next_arg_reg,
                                        NULL_RTX));
      else
        emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
                                  rounded_stack_size_rtx, next_arg_reg,
                                  struct_value_size_rtx));
    }
  else
#endif
    gcc_unreachable ();
  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
        (VOIDmode,
         gen_rtx_USE (VOIDmode,
                      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
         call_fusage);

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);
  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
                                               REG_NOTES (call_insn));
  else
    {
      int rn = lookup_stmt_eh_region (fntree);

      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
         throw, which we already took care of.  */
      if (rn > 0)
        REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
                                                   REG_NOTES (call_insn));
      note_current_region_may_contain_throw ();
    }
  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
                                               REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
                                                 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
        CALL_INSN_FUNCTION_USAGE (call_insn)
          = gen_rtx_EXPR_LIST (VOIDmode,
                               gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
                               CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }
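  /* For instance (illustrative numbers): if the callee pops its own 8
     bytes out of a 24-byte argument block, only 16 bytes remain for the
     caller to release, so both the recorded block size and the running
     stack_pointer_delta shrink by 8 above.  */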
  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
         we need an instruction to pop them sooner or later.
         Perhaps do it now; perhaps just record how much space to pop later.

         If returning from the subroutine does pop the args, indicate that the
         stack pointer will be changed.  */

      if (rounded_stack_size != 0)
        {
          if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
            /* Just pretend we did the pop.  */
            stack_pointer_delta -= rounded_stack_size;
          else if (flag_defer_pop && inhibit_defer_pop == 0
                   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
            pending_stack_adjust += rounded_stack_size;
          else
            adjust_stack (rounded_stack_size_rtx);
        }
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */
static int
special_function_p (tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
         since they are not the magic functions we would otherwise
         think they are.
         FIXME: this should be handled with attributes, not with this
         hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
         because you can declare fork() inside a function if you
         wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
          || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
         makes no sense to pass it as a pointer-to-function to
         anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
            && name[0] == 'a'
            && ! strcmp (name, "alloca"))
           || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
               && name[0] == '_'
               && ! strcmp (name, "__builtin_alloca"))))
        flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
        {
          if (name[1] == '_' && name[2] == 'x')
            tname += 3;
          else if (name[1] == '_')
            tname += 2;
          else
            tname += 1;
        }
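      /* For instance, both "_setjmp" and "__xsetjmp" are reduced to
         "setjmp" here, so such aliased entry points pick up the same
         ECF flags as the plain name (names for illustration only).  */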
      if (tname[0] == 's')
        {
          if ((tname[1] == 'e'
               && (! strcmp (tname, "setjmp")
                   || ! strcmp (tname, "setjmp_syscall")))
              || (tname[1] == 'i'
                  && ! strcmp (tname, "sigsetjmp"))
              || (tname[1] == 'a'
                  && ! strcmp (tname, "savectx")))
            flags |= ECF_RETURNS_TWICE;

          if (tname[1] == 'i'
              && ! strcmp (tname, "siglongjmp"))
            flags |= ECF_NORETURN;
        }
      else if ((tname[0] == 'q' && tname[1] == 's'
                && ! strcmp (tname, "qsetjmp"))
               || (tname[0] == 'v' && tname[1] == 'f'
                   && ! strcmp (tname, "vfork")))
        flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
               && ! strcmp (tname, "longjmp"))
        flags |= ECF_NORETURN;
    }

  return flags;
}
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (tree fndecl)
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}
/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
          == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                              0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
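/* The test above matches only a direct call by name: e.g. the CALL_EXPR
   for `p = alloca (n)' has an ADDR_EXPR wrapping the FUNCTION_DECL for
   alloca as operand 0.  A call through a function pointer does not have
   this shape and is not detected (example for illustration only).  */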
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (tree exp)
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      type = TREE_TYPE (exp);

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
        flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
        flags |= ECF_RETURNS_TWICE;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
        flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (DECL_IS_NOVOPS (exp))
        flags |= ECF_NOVOPS;

      if (TREE_NOTHROW (exp))
        flags |= ECF_NOTHROW;

      if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
        flags |= ECF_LIBCALL_BLOCK | ECF_CONST;

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (TREE_OPERAND (t, 0));
      if (t && TREE_CODE (t) == POINTER_TYPE)
        flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
        flags = 0;
    }

  return flags;
}
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */
static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
                                int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        *reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();
          }

        /* If the value is a non-legitimate constant, force it into a
           pseudo now.  TLS symbols sometimes need a call to resolve.  */
        if (CONSTANT_P (args[i].value)
            && !LEGITIMATE_CONSTANT_P (args[i].value))
          args[i].value = force_reg (args[i].mode, args[i].value);

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If we're going to have to load the value by parts, pull the
           parts into pseudos.  The part extraction process can involve
           non-trivial computation.  */
        if (GET_CODE (args[i].reg) == PARALLEL)
          {
            tree type = TREE_TYPE (args[i].tree_value);
            args[i].parallel_value
              = emit_group_load_into_temps (args[i].reg, args[i].value,
                                            type, int_size_in_bytes (type));
          }

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameters registers.  */

        else if ((! (REG_P (args[i].value)
                     || (GET_CODE (args[i].value) == SUBREG
                         && REG_P (SUBREG_REG (args[i].value)))))
                 && args[i].mode != BLKmode
                 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
                 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
                     || optimize))
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock,
                          int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
        int num_to_save;
        enum machine_mode save_mode;
        int delta;
        rtx stack_area;
        rtx save_area;

        while (stack_usage_map[--high] == 0)
          ;

        *low_to_save = low;
        *high_to_save = high;

        num_to_save = high - low + 1;
        save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

        /* If we don't have the required alignment, must do this
           in BLKmode.  */
        if ((low & (MIN (GET_MODE_SIZE (save_mode),
                         BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
          save_mode = BLKmode;
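        /* E.g. (illustrative): if bytes 4..11 of the fixed area are in
           use, num_to_save is 8 and save_mode starts out as the 8-byte
           integer mode; but since LOW == 4 is misaligned for an 8-byte
           access (4 & 7 != 0), the save falls back to BLKmode.  */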
#ifdef ARGS_GROW_DOWNWARD
        delta = -high;
#else
        delta = low;
#endif
        stack_area = gen_rtx_MEM (save_mode,
                                  memory_address (save_mode,
                                                  plus_constant (argblock,
                                                                 delta)));

        set_mem_align (stack_area, PARM_BOUNDARY);
        if (save_mode == BLKmode)
          {
            save_area = assign_stack_temp (BLKmode, num_to_save, 0);
            emit_block_move (validize_mem (save_area), stack_area,
                             GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
          }
        else
          {
            save_area = gen_reg_rtx (save_mode);
            emit_move_insn (save_area, stack_area);
          }

        return save_area;
      }

  return NULL_RTX;
}
static void
restore_fixed_argument_area (rtx save_area, rtx argblock,
                             int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
                            memory_address (save_mode,
                                            plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
                     GEN_INT (high_to_save - low_to_save + 1),
                     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */
static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int endian_correction = 0;

        if (args[i].partial)
          {
            gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
            args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
          }
        else
          {
            args[i].n_aligned_regs
              = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
          }

        args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);

        /* Structures smaller than a word are normally aligned to the
           least significant byte.  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
            && (BLOCK_REG_PADDING (args[i].mode,
                                   TREE_TYPE (args[i].tree_value), 1)
                == downward)
#else
            && BYTES_BIG_ENDIAN
#endif
            )
          endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
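        /* Worked example (illustrative): a 3-byte struct on a 32-bit
           big-endian target sits in the most significant bytes of its
           word, so endian_correction = 32 - 3 * 8 = 8 and the bits are
           stored 8 positions up from the least significant end below.  */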
        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

            args[i].aligned_regs[j] = reg;
            word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
                                      word_mode, word_mode);

            /* There is no need to restrict this code to loading items
               in TYPE_ALIGN sized hunks.  The bitfield instructions can
               load up entire word sized registers efficiently.

               ??? This may not be needed anymore.
               We used to emit a clobber here but that doesn't let later
               passes optimize the instructions we emit.  By storing 0 into
               the register, later passes know that the first AND to zero
               out the bitfield being set in the register is unnecessary.
               The store of 0 will be deleted as will at least the first
               AND.  */

            emit_move_insn (reg, const0_rtx);

            bytes -= bitsize / BITS_PER_UNIT;
            store_bit_field (reg, bitsize, endian_correction, word_mode,
                             word);
          }
      }
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */
static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
                                 struct arg_data *args,
                                 struct args_size *args_size,
                                 int n_named_args ATTRIBUTE_UNUSED,
                                 tree actparms, tree fndecl,
                                 CUMULATIVE_ARGS *args_so_far,
                                 int reg_parm_stack_space,
                                 rtx *old_stack_level, int *old_pending_adj,
                                 int *must_preallocate, int *ecf_flags,
                                 bool *may_tailcall, bool call_from_thunk_p)
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;

      /* In this case, must reverse order of args
         so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }
  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
        args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
         pass the first field of the union.  We have already verified that
         the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
        type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

         args[i].reg is nonzero if all or part is passed in registers.

         args[i].partial is nonzero if part but not all is passed in registers,
         and the exact value says how many bytes are passed in registers.

         args[i].pass_on_stack is nonzero if the argument must at least be
         computed on the stack.  It may then be loaded back into registers
         if args[i].reg is nonzero.

         These decisions are driven by the FUNCTION_... macros and must agree
         with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far, TYPE_MODE (type),
                             type, argpos < n_named_args))
        {
          bool callee_copies;
          tree base;

          callee_copies
            = reference_callee_copied (args_so_far, TYPE_MODE (type),
                                       type, argpos < n_named_args);

          /* If we're compiling a thunk, pass through invisible references
             instead of making a copy.  */
          if (call_from_thunk_p
              || (callee_copies
                  && !TREE_ADDRESSABLE (type)
                  && (base = get_base_address (args[i].tree_value))
                  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
            {
              /* We can't use sibcalls if a callee-copied argument is
                 stored in the current function's frame.  */
              if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
                *may_tailcall = false;

              args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
              type = TREE_TYPE (args[i].tree_value);

              *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
            }
          else
            {
              /* We make a copy of the object and pass the address to the
                 function being called.  */
              rtx copy;

              if (!COMPLETE_TYPE_P (type)
                  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
                  || (flag_stack_check && ! STACK_CHECK_BUILTIN
                      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
                                                STACK_CHECK_MAX_VAR_SIZE))))
                {
                  /* This is a variable-sized object.  Make space on the stack
                     for it.  */
                  rtx size_rtx = expr_size (TREE_VALUE (p));

                  if (*old_stack_level == 0)
                    {
                      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
                      *old_pending_adj = pending_stack_adjust;
                      pending_stack_adjust = 0;
                    }

                  copy = gen_rtx_MEM (BLKmode,
                                      allocate_dynamic_stack_space
                                      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
                  set_mem_attributes (copy, type, 1);
                }
              else
                copy = assign_temp (type, 0, 1, 0);

              store_expr (args[i].tree_value, copy, 0);

              if (callee_copies)
                *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
              else
                *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

              args[i].tree_value
                = build_fold_addr_expr (make_tree (type, copy));
              type = TREE_TYPE (args[i].tree_value);
              *may_tailcall = false;
            }
        }
      mode = TYPE_MODE (type);
      unsignedp = TYPE_UNSIGNED (type);

      if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
        mode = promote_mode (type, mode, &unsignedp, 1);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
                                  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
         register window has to be unwound before calling the routine, so
         arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
                                                     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

      if (args[i].reg)
        args[i].partial
          = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
                                             argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
         it means that we are to pass this arg in the register(s) designated
         by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
          && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
        args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
         since we must evaluate the object into its final location.

         If this is to be passed in both registers and the stack, it is simpler
         to preallocate.  */
      if (TREE_ADDRESSABLE (type)
          || (args[i].pass_on_stack && args[i].reg != 0))
        *must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
         we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
        *ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
          || reg_parm_stack_space > 0
          || args[i].pass_on_stack)
        locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                             1,
#else
                             args[i].reg != 0,
#endif
                             args[i].pass_on_stack ? 0 : args[i].partial,
                             fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
        /* The argument is passed entirely in registers.  See at which
           end it should be padded.  */
        args[i].locate.where_pad =
          BLOCK_REG_PADDING (mode, type,
                             int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
        ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
         have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
                            argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */
static int
compute_argument_block_size (int reg_parm_stack_space,
                             struct args_size *args_size,
                             int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
        {
          /* We don't handle this case yet.  To handle it correctly we have
             to add the delta, round and subtract the delta.
             Currently no machine description requires this support.  */
          gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
          args_size->var = round_up (args_size->var, preferred_stack_boundary);
        }

      if (reg_parm_stack_space > 0)
        {
          args_size->var
            = size_binop (MAX_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
          /* The area corresponding to register parameters is not to count in
             the size of the block we need.  So make the adjustment.  */
          args_size->var
            = size_binop (MINUS_EXPR, args_size->var,
                          ssize_int (reg_parm_stack_space));
#endif
        }
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
        preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
                               + stack_pointer_delta
                               + preferred_stack_boundary - 1)
                              / preferred_stack_boundary
                              * preferred_stack_boundary)
                             - stack_pointer_delta);
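      /* Worked example (illustrative numbers): with args_size->constant
         of 20, stack_pointer_delta of 4 and a preferred boundary of 16
         bytes, the expression rounds 20 + 4 = 24 up to 32 and subtracts
         the delta again, yielding 28; pushing 28 bytes then leaves the
         stack pointer 4 + 28 = 32 bytes past its base, 16-byte aligned.  */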
      args_size->constant = MAX (args_size->constant,
                                 reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */
static void
precompute_arguments (int flags, int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */
  if ((flags & ECF_LIBCALL_BLOCK) == 0)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      enum machine_mode mode;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));

      args[i].initial_value = args[i].value
        = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

      mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
      if (mode != args[i].mode)
        {
          args[i].value
            = convert_modes (args[i].mode, mode,
                             args[i].value, args[i].unsignedp);
#if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
          /* CSE will replace this only if it contains args[i].value
             pseudo, so convert it down to the declared mode using
             a SUBREG.  */
          if (REG_P (args[i].value)
              && GET_MODE_CLASS (args[i].mode) == MODE_INT)
            {
              args[i].initial_value
                = gen_lowpart_SUBREG (mode, args[i].value);
              SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
              SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
                                            args[i].unsignedp);
            }
#endif
        }
    }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
                           struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
        {
          if (args[i].partial > 0 && ! args[i].pass_on_stack)
            partial_seen = 1;
          else if (partial_seen && args[i].reg == 0)
            must_preallocate = 1;

          if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
              && (TREE_CODE (args[i].tree_value) == CALL_EXPR
                  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
                  || TREE_CODE (args[i].tree_value) == COND_EXPR
                  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
            copy_to_evaluate_size
              += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        }

      if (copy_to_evaluate_size * 2 >= args_size->constant
          && args_size->constant > 0)
        must_preallocate = 1;
    }
  return must_preallocate;
}
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock,
                            int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
        arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
        {
          rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
          rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
          rtx addr;
          unsigned int align, boundary;

          /* Skip this parm if it will not be passed on the stack.  */
          if (! args[i].pass_on_stack && args[i].reg != 0)
            continue;

          if (GET_CODE (offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack = gen_rtx_MEM (args[i].mode, addr);
          set_mem_attributes (args[i].stack,
                              TREE_TYPE (args[i].tree_value), 1);
          align = BITS_PER_UNIT;
          boundary = args[i].locate.boundary;
          if (args[i].locate.where_pad != downward)
            align = boundary;
          else if (GET_CODE (offset) == CONST_INT)
            {
              align = INTVAL (offset) * BITS_PER_UNIT | boundary;
              align = align & -align;
            }
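          /* The pair of statements above keeps the largest power of two
             that divides both the slot boundary and the byte offset in
             bits: e.g. (illustrative) an offset of 4 bytes with a 64-bit
             boundary gives 32 | 64 == 96, and 96 & -96 == 32 bits.  */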
          set_mem_align (args[i].stack, align);

          if (GET_CODE (slot_offset) == CONST_INT)
            addr = plus_constant (arg_reg, INTVAL (slot_offset));
          else
            addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

          addr = plus_constant (addr, arg_offset);
          args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
          set_mem_attributes (args[i].stack_slot,
                              TREE_TYPE (args[i].tree_value), 1);
          set_mem_align (args[i].stack_slot, args[i].locate.boundary);

          /* Function incoming arguments may overlap with sibling call
             outgoing arguments and we cannot allow reordering of reads
             from function arguments with stores to outgoing arguments
             of sibling calls.  */
          set_mem_alias_set (args[i].stack, 0);
          set_mem_alias_set (args[i].stack_slot, 0);
        }
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */
static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
         make an external definition for it.  */
      if (! TREE_USED (fndecl))
        {
          assemble_external (fndecl);
          TREE_USED (fndecl) = 1;
        }

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }

  return funexp;
}
/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */
static void
load_register_parameters (struct arg_data *args, int num_actuals,
                          rtx *call_fusage, int flags, int is_sibcall,
                          int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      rtx reg = ((flags & ECF_SIBCALL)
                 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
        {
          int partial = args[i].partial;
          int nregs;
          int size = 0;
          rtx before_arg = get_last_insn ();

          /* Set non-negative if we must move a word at a time, even if
             just one word (e.g, partial == 4 && mode == DFmode).  Set
             to -1 if we just use a normal move insn.  This value can be
             zero if the argument is a zero size structure.  */
          nregs = -1;
          if (GET_CODE (reg) == PARALLEL)
            ;
          else if (partial)
            {
              gcc_assert (partial % UNITS_PER_WORD == 0);
              nregs = partial / UNITS_PER_WORD;
            }
          else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
            {
              size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
              nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
            }
          else
            size = GET_MODE_SIZE (args[i].mode);

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */

          if (GET_CODE (reg) == PARALLEL)
            emit_group_move (reg, args[i].parallel_value);

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          else if (nregs == -1)
            {
              emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
              /* Handle case where we have a value that needs shifting
                 up to the msb.  eg. a QImode value and we're padding
                 upward on a BYTES_BIG_ENDIAN machine.  */
              if (size < UNITS_PER_WORD
                  && (args[i].locate.where_pad
                      == (BYTES_BIG_ENDIAN ? upward : downward)))
                {
                  rtx x;
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

                  /* Assigning REG here rather than a temp makes CALL_FUSAGE
                     report the whole reg as used.  Strictly speaking, the
                     call only uses SIZE bytes at the msb end, but it doesn't
                     seem worth generating rtl to say that.  */
                  reg = gen_rtx_REG (word_mode, REGNO (reg));
                  x = expand_shift (LSHIFT_EXPR, word_mode, reg,
                                    build_int_cst (NULL_TREE, shift),
                                    reg, 1);
                  if (x != reg)
                    emit_move_insn (reg, x);
                }
#endif
            }

          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);

          else if (partial == 0 || args[i].pass_on_stack)
            {
              rtx mem = validize_mem (args[i].value);

              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
                  && args[i].locate.where_pad == downward
#else
                  && BYTES_BIG_ENDIAN
#endif
                  )
                {
                  rtx tem = operand_subword_force (mem, 0, args[i].mode);
                  rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
                  rtx x = gen_reg_rtx (word_mode);
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
                  enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
                                                        : LSHIFT_EXPR;

                  emit_move_insn (x, tem);
                  x = expand_shift (dir, word_mode, x,
                                    build_int_cst (NULL_TREE, shift),
                                    ri, 1);
                  if (x != ri)
                    emit_move_insn (ri, x);
                }
              else
                move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }

          /* When a parameter is a block, and perhaps in other cases, it is
             possible that it did a load from an argument slot that was
             already clobbered.  */
          if (is_sibcall
              && check_sibcall_argument_overlap (before_arg, &args[i], 0))
            *sibcall_failure = 1;

          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (GET_CODE (reg) == PARALLEL)
            use_group_regs (call_fusage, reg);
          else if (nregs == -1)
            use_reg (call_fusage, reg);
          else if (nregs > 0)
            use_regs (call_fusage, REGNO (reg), nregs);
        }
    }
}
/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
   be popped after the call.  Returns the adjustment.  */
static int
combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
                                           struct args_size *args_size,
                                           unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  HOST_WIDE_INT adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  unadjusted_alignment
    = ((stack_pointer_delta + unadjusted_args_size)
       % preferred_unit_stack_boundary);

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unadjusted_alignment
    = (unadjusted_alignment
       - (pending_stack_adjust % preferred_unit_stack_boundary));
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1)
    {
      if (unadjusted_alignment > 0)
        adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
      else
        adjustment += unadjusted_alignment;
    }

  /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  return adjustment;
}
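/* Worked example for the function above (illustrative numbers): with a
   16-byte preferred boundary, stack_pointer_delta == 0, 20 bytes of
   arguments and pending_stack_adjust == 35, the initial misalignment is
   4 and 35 % 16 == 3, so the code settles on an adjustment of
   35 - (16 - 1) = 20: popping 20 bytes now and then pushing the 20
   bytes of arguments leaves the stack 16-byte aligned at the call, and
   args_size->constant becomes 35 - 20 + 20 = 35 bytes to pop after it.  */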
/* Scan expression X to see whether it dereferences any argument slots
   that we have already clobbered with tail call arguments (as noted in
   the stored_args_map bitmap).
   Return nonzero if X dereferences such an argument slot,
   zero otherwise.  */
static int
check_sibcall_argument_overlap_1 (rtx x)
{
  RTX_CODE code;
  int i, j;
  unsigned int k;
  const char *fmt;

  if (x == NULL_RTX)
    return 0;

  code = GET_CODE (x);

  if (code == MEM)
    {
      if (XEXP (x, 0) == current_function_internal_arg_pointer)
        i = 0;
      else if (GET_CODE (XEXP (x, 0)) == PLUS
               && XEXP (XEXP (x, 0), 0) ==
                  current_function_internal_arg_pointer
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
        i = INTVAL (XEXP (XEXP (x, 0), 1));
      else
        return 0;

#ifdef ARGS_GROW_DOWNWARD
      i = -i - GET_MODE_SIZE (GET_MODE (x));
#endif

      for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
        if (i + k < stored_args_map->n_bits
            && TEST_BIT (stored_args_map, i + k))
          return 1;

      return 0;
    }
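  /* E.g. (illustrative, args growing upward): a 4-byte SImode MEM at
     (plus internal_arg_pointer 8) starts the loop above at I == 8 and
     tests bits 8..11 of stored_args_map.  */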
  /* Scan all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
        {
          if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
            return 1;
        }
      else if (*fmt == 'E')
        {
          for (j = 0; j < XVECLEN (x, i); j++)
            if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
/* Scan the sequence after INSN to see whether it dereferences any argument
   slots that we have already clobbered with tail call arguments (as noted
   in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is nonzero, add
   the stack slots for ARG to the stored_args_map bitmap afterwards (when
   ARG is a register MARK_STORED_ARGS_MAP should be 0).  Return nonzero if
   the sequence after INSN dereferences such argument slots, zero
   otherwise.  */
static int
check_sibcall_argument_overlap (rtx insn, struct arg_data *arg,
                                int mark_stored_args_map)
{
  int low, high;

  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
#ifdef ARGS_GROW_DOWNWARD
      low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
#else
      low = arg->locate.slot_offset.constant;
#endif

      for (high = low + arg->locate.size.constant; low < high; low++)
        SET_BIT (stored_args_map, low);
    }
  return insn != NULL_RTX;
}
1737 /* Given that a function returns a value of mode MODE at the most
1738 significant end of hard register VALUE, shift VALUE left or right
1739 as specified by LEFT_P. Return true if some action was needed. */
1741 bool
1742 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1744 HOST_WIDE_INT shift;
1746 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1747 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
1748 if (shift == 0)
1749 return false;
1751 /* Use ashr rather than lshr for right shifts. This is for the benefit
1752 of the MIPS port, which requires SImode values to be sign-extended
1753 when stored in 64-bit registers. */
1754 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1755 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1756 gcc_unreachable ();
1757 return true;
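A worked instance of the shift computed above (a plain-integer sketch, not the optab expansion itself): a 32-bit value returned in the most significant end of a 64-bit register needs a shift of 64 - 32 = 32 bits, with the right shift arithmetic for the MIPS reason noted in the comment (assuming >> on negative values is arithmetic, as on common compilers):

#include <stdint.h>
#include <stdio.h>

static uint64_t
shift_return_sketch (uint64_t reg, int reg_bits, int mode_bits, int left_p)
{
  int shift = reg_bits - mode_bits;
  if (shift == 0)
    return reg;                /* nothing to do, as above */
  return left_p ? reg << shift
                : (uint64_t) ((int64_t) reg >> shift);   /* ashr, not lshr */
}

int
main (void)
{
  uint64_t reg = 0xdeadbeef00000000ull;   /* SImode value in the MSB */
  printf ("%#llx\n",
          (unsigned long long) shift_return_sketch (reg, 64, 32, 0));
  /* Prints 0xffffffffdeadbeef: the value moved down, sign-extended.  */
  return 0;
}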
1760 /* Generate all the code for a function call
1761 and return an rtx for its value.
1762 Store the value in TARGET (specified as an rtx) if convenient.
1763 If the value is stored in TARGET then TARGET is returned.
1764 If IGNORE is nonzero, then we ignore the value of the function call. */
1766 rtx
1767 expand_call (tree exp, rtx target, int ignore)
1769 /* Nonzero if we are currently expanding a call. */
1770 static int currently_expanding_call = 0;
1772 /* List of actual parameters. */
1773 tree actparms = TREE_OPERAND (exp, 1);
1774 /* RTX for the function to be called. */
1775 rtx funexp;
1776 /* Sequence of insns to perform a normal "call". */
1777 rtx normal_call_insns = NULL_RTX;
1778 /* Sequence of insns to perform a tail "call". */
1779 rtx tail_call_insns = NULL_RTX;
1780 /* Data type of the function. */
1781 tree funtype;
1782 tree type_arg_types;
1783 /* Declaration of the function being called,
1784 or 0 if the function is computed (not known by name). */
1785 tree fndecl = 0;
1786 /* The type of the function being called. */
1787 tree fntype;
1788 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1789 int pass;
1791 /* Register in which non-BLKmode value will be returned,
1792 or 0 if no value or if value is BLKmode. */
1793 rtx valreg;
1794 /* Address where we should return a BLKmode value;
1795 0 if value not BLKmode. */
1796 rtx structure_value_addr = 0;
1797 /* Nonzero if that address is being passed by treating it as
1798 an extra, implicit first parameter. Otherwise,
1799 it is passed by being copied directly into struct_value_rtx. */
1800 int structure_value_addr_parm = 0;
1801 /* Size of aggregate value wanted, or zero if none wanted
1802 or if we are using the non-reentrant PCC calling convention
1803 or expecting the value in registers. */
1804 HOST_WIDE_INT struct_value_size = 0;
1805 /* Nonzero if called function returns an aggregate in memory PCC style,
1806 by returning the address of where to find it. */
1807 int pcc_struct_value = 0;
1808 rtx struct_value = 0;
1810 /* Number of actual parameters in this call, including struct value addr. */
1811 int num_actuals;
1812 /* Number of named args. Args after this are anonymous ones
1813 and they must all go on the stack. */
1814 int n_named_args;
1816 /* Vector of information about each argument.
1817 Arguments are numbered in the order they will be pushed,
1818 not the order they are written. */
1819 struct arg_data *args;
1821 /* Total size in bytes of all the stack-parms scanned so far. */
1822 struct args_size args_size;
1823 struct args_size adjusted_args_size;
1824 /* Size of arguments before any adjustments (such as rounding). */
1825 int unadjusted_args_size;
1826 /* Data on reg parms scanned so far. */
1827 CUMULATIVE_ARGS args_so_far;
1828 /* Nonzero if a reg parm has been scanned. */
1829 int reg_parm_seen;
1830 /* Nonzero if this is an indirect function call. */
1832 /* Nonzero if we must avoid push-insns in the args for this call.
1833 If stack space is allocated for register parameters, but not by the
1834 caller, then it is preallocated in the fixed part of the stack frame.
1835 So the entire argument block must then be preallocated (i.e., we
1836 ignore PUSH_ROUNDING in that case). */
1838 int must_preallocate = !PUSH_ARGS;
1840 /* Size of the stack reserved for parameter registers. */
1841 int reg_parm_stack_space = 0;
1843 /* Address of space preallocated for stack parms
1844 (on machines that lack push insns), or 0 if space not preallocated. */
1845 rtx argblock = 0;
1847 /* Mask of ECF_ flags. */
1848 int flags = 0;
1849 #ifdef REG_PARM_STACK_SPACE
1850 /* Define the boundary of the register parm stack space that needs to be
1851 saved, if any. */
1852 int low_to_save, high_to_save;
1853 rtx save_area = 0; /* Place that it is saved */
1854 #endif
1856 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1857 char *initial_stack_usage_map = stack_usage_map;
1858 char *stack_usage_map_buf = NULL;
1860 int old_stack_allocated;
1862 /* State variables to track stack modifications. */
1863 rtx old_stack_level = 0;
1864 int old_stack_arg_under_construction = 0;
1865 int old_pending_adj = 0;
1866 int old_inhibit_defer_pop = inhibit_defer_pop;
1868 /* Some stack pointer alterations we make are performed via
1869 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1870 which we then also need to save/restore along the way. */
1871 int old_stack_pointer_delta = 0;
1873 rtx call_fusage;
1874 tree p = TREE_OPERAND (exp, 0);
1875 tree addr = TREE_OPERAND (exp, 0);
1876 int i;
1877 /* The alignment of the stack, in bits. */
1878 unsigned HOST_WIDE_INT preferred_stack_boundary;
1879 /* The alignment of the stack, in bytes. */
1880 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1881 /* The static chain value to use for this call. */
1882 rtx static_chain_value;
1883 /* See if this is a "nothrow" function call. */
1884 if (TREE_NOTHROW (exp))
1885 flags |= ECF_NOTHROW;
1887 /* See if we can find a DECL-node for the actual function, and get the
1888 function attributes (flags) from the function decl or type node. */
1889 fndecl = get_callee_fndecl (exp);
1890 if (fndecl)
1892 fntype = TREE_TYPE (fndecl);
1893 flags |= flags_from_decl_or_type (fndecl);
1895 else
1897 fntype = TREE_TYPE (TREE_TYPE (p));
1898 flags |= flags_from_decl_or_type (fntype);
1901 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
1903 /* Warn if this value is an aggregate type,
1904 regardless of which calling convention we are using for it. */
1905 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1906 warning (OPT_Waggregate_return, "function call has aggregate value");
1908 /* If the result of a pure or const function call is ignored (or void),
1909 and none of its arguments are volatile, we can avoid expanding the
1910 call and just evaluate the arguments for side-effects. */
1911 if ((flags & (ECF_CONST | ECF_PURE))
1912 && (ignore || target == const0_rtx
1913 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
1915 bool volatilep = false;
1916 tree arg;
1918 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1919 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
1921 volatilep = true;
1922 break;
1925 if (! volatilep)
1927 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1928 expand_expr (TREE_VALUE (arg), const0_rtx,
1929 VOIDmode, EXPAND_NORMAL);
1930 return const0_rtx;
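At the source level the elision above means the following (a sketch; `lookup' is a hypothetical pure function): the call itself disappears, but the argument's side effects are still evaluated.

/* The result of a pure function is unused, so only the increment of
   *p needs to survive expansion.  */
extern int lookup (int) __attribute__ ((pure));

void
discard (int *p)
{
  lookup (*p += 1);
}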
1934 #ifdef REG_PARM_STACK_SPACE
1935 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1936 #endif
1938 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1939 if (reg_parm_stack_space > 0 && PUSH_ARGS)
1940 must_preallocate = 1;
1941 #endif
1943 /* Set up a place to return a structure. */
1945 /* Cater to broken compilers. */
1946 if (aggregate_value_p (exp, fndecl))
1948 /* This call returns a big structure. */
1949 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1951 #ifdef PCC_STATIC_STRUCT_RETURN
1953 pcc_struct_value = 1;
1955 #else /* not PCC_STATIC_STRUCT_RETURN */
1957 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1959 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
1960 structure_value_addr = XEXP (target, 0);
1961 else
1963 /* For variable-sized objects, we must be called with a target
1964 specified. If we were to allocate space on the stack here,
1965 we would have no way of knowing when to free it. */
1966 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
1968 mark_temp_addr_taken (d);
1969 structure_value_addr = XEXP (d, 0);
1970 target = 0;
1973 #endif /* not PCC_STATIC_STRUCT_RETURN */
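A runnable sketch of the non-PCC convention handled above (make_big_lowered is a hypothetical stand-in for what the compiler emits, not a calls.c helper): the caller supplies the address where the aggregate is built, either as a hidden argument or via struct_value_rtx.

#include <stdio.h>

struct big { int v[8]; };

/* The callee, as lowered: it constructs the result directly in the
   caller-provided slot instead of "returning" it.  */
static void
make_big_lowered (struct big *hidden_ret)
{
  for (int i = 0; i < 8; i++)
    hidden_ret->v[i] = i;
}

int
main (void)
{
  struct big b;
  make_big_lowered (&b);   /* roughly what `b = make_big ()' becomes */
  printf ("%d\n", b.v[7]);
  return 0;
}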
1976 /* Figure out the amount to which the stack should be aligned. */
1977 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1978 if (fndecl)
1980 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
1981 if (i && i->preferred_incoming_stack_boundary)
1982 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
1985 /* Operand 0 is a pointer-to-function; get the type of the function. */
1986 funtype = TREE_TYPE (addr);
1987 gcc_assert (POINTER_TYPE_P (funtype));
1988 funtype = TREE_TYPE (funtype);
1990 /* Munge the tree to split complex arguments into their imaginary
1991 and real parts. */
1992 if (targetm.calls.split_complex_arg)
1994 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
1995 actparms = split_complex_values (actparms);
1997 else
1998 type_arg_types = TYPE_ARG_TYPES (funtype);
2000 if (flags & ECF_MAY_BE_ALLOCA)
2001 current_function_calls_alloca = 1;
2003 /* If struct_value_rtx is 0, it means pass the address
2004 as if it were an extra parameter. */
2005 if (structure_value_addr && struct_value == 0)
2007 /* If structure_value_addr is a REG other than
2008 virtual_outgoing_args_rtx, we can always use it. If it
2009 is not a REG, we must always copy it into a register.
2010 If it is virtual_outgoing_args_rtx, we must copy it to another
2011 register in some cases. */
2012 rtx temp = (!REG_P (structure_value_addr)
2013 || (ACCUMULATE_OUTGOING_ARGS
2014 && stack_arg_under_construction
2015 && structure_value_addr == virtual_outgoing_args_rtx)
2016 ? copy_addr_to_reg (convert_memory_address
2017 (Pmode, structure_value_addr))
2018 : structure_value_addr);
2020 actparms
2021 = tree_cons (error_mark_node,
2022 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2023 temp),
2024 actparms);
2025 structure_value_addr_parm = 1;
2028 /* Count the arguments and set NUM_ACTUALS. */
2029 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2030 num_actuals++;
2032 /* Compute number of named args.
2033 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2035 if (type_arg_types != 0)
2036 n_named_args
2037 = (list_length (type_arg_types)
2038 /* Count the struct value address, if it is passed as a parm. */
2039 + structure_value_addr_parm);
2040 else
2041 /* If we know nothing, treat all args as named. */
2042 n_named_args = num_actuals;
2044 /* Start updating where the next arg would go.
2046 On some machines (such as the PA) indirect calls have a different
2047 calling convention than normal calls. The fourth argument in
2048 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2049 or not. */
2050 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2052 /* Now possibly adjust the number of named args.
2053 Normally, don't include the last named arg if anonymous args follow.
2054 We do include the last named arg if
2055 targetm.calls.strict_argument_naming() returns nonzero.
2056 (If no anonymous args follow, the result of list_length is actually
2057 one too large. This is harmless.)
2059 If targetm.calls.pretend_outgoing_varargs_named() returns
2060 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2061 this machine will be able to place unnamed args that were passed
2062 in registers into the stack. So treat all args as named. This
2063 allows the insns emitting for a specific argument list to be
2064 independent of the function declaration.
2066 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2067 we do not have any reliable way to pass unnamed args in
2068 registers, so we must force them into memory. */
2070 if (type_arg_types != 0
2071 && targetm.calls.strict_argument_naming (&args_so_far))
2072 ;
2073 else if (type_arg_types != 0
2074 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2075 /* Don't include the last named arg. */
2076 --n_named_args;
2077 else
2078 /* Treat all args as named. */
2079 n_named_args = num_actuals;
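A concrete instance of the counting above (sketch; the function is hypothetical): for a prototype such as `int f (int, const char *, ...)', TYPE_ARG_TYPES lists the two named parameter types, so with strict argument naming n_named_args == 2 however many actuals follow.

#include <stdarg.h>
#include <stdio.h>

static int
f (int n, const char *tag, ...)   /* two named args, the rest anonymous */
{
  va_list ap;
  int sum = 0;
  va_start (ap, tag);
  for (int i = 0; i < n; i++)
    sum += va_arg (ap, int);      /* anonymous args, placed per the ABI */
  va_end (ap);
  printf ("%s: %d\n", tag, sum);
  return sum;
}

int
main (void)
{
  return f (3, "sum", 1, 2, 3) == 6 ? 0 : 1;
}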
2081 /* Make a vector to hold all the information about each arg. */
2082 args = alloca (num_actuals * sizeof (struct arg_data));
2083 memset (args, 0, num_actuals * sizeof (struct arg_data));
2085 /* Build up entries in the ARGS array, compute the size of the
2086 arguments into ARGS_SIZE, etc. */
2087 initialize_argument_information (num_actuals, args, &args_size,
2088 n_named_args, actparms, fndecl,
2089 &args_so_far, reg_parm_stack_space,
2090 &old_stack_level, &old_pending_adj,
2091 &must_preallocate, &flags,
2092 &try_tail_call, CALL_FROM_THUNK_P (exp));
2094 if (args_size.var)
2096 /* If this function requires a variable-sized argument list, don't
2097 try to make a cse'able block for this call. We may be able to
2098 do this eventually, but it is too complicated to keep track of
2099 what insns go in the cse'able block and which don't. */
2101 flags &= ~ECF_LIBCALL_BLOCK;
2102 must_preallocate = 1;
2105 /* Now make final decision about preallocating stack space. */
2106 must_preallocate = finalize_must_preallocate (must_preallocate,
2107 num_actuals, args,
2108 &args_size);
2110 /* If the structure value address will reference the stack pointer, we
2111 must stabilize it. We don't need to do this if we know that we are
2112 not going to adjust the stack pointer in processing this call. */
2114 if (structure_value_addr
2115 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2116 || reg_mentioned_p (virtual_outgoing_args_rtx,
2117 structure_value_addr))
2118 && (args_size.var
2119 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2120 structure_value_addr = copy_to_reg (structure_value_addr);
2122 /* Tail calls can make things harder to debug, and we've traditionally
2123 pushed these optimizations into -O2. Don't try if we're already
2124 expanding a call, as that means we're an argument. Don't try if
2125 there's cleanups, as we know there's code to follow the call. */
2127 if (currently_expanding_call++ != 0
2128 || !flag_optimize_sibling_calls
2129 || args_size.var
2130 || lookup_stmt_eh_region (exp) >= 0)
2131 try_tail_call = 0;
2133 /* Remaining reasons for the tail call optimization to fail. */
2134 if (
2135 #ifdef HAVE_sibcall_epilogue
2136 !HAVE_sibcall_epilogue
2137 #else
2138 1
2139 #endif
2140 || !try_tail_call
2141 /* Doing sibling call optimization needs some work, since
2142 structure_value_addr can be allocated on the stack.
2143 It does not seem worth the effort since few optimizable
2144 sibling calls will return a structure. */
2145 || structure_value_addr != NULL_RTX
2146 /* Check whether the target is able to optimize the call
2147 into a sibcall. */
2148 || !targetm.function_ok_for_sibcall (fndecl, exp)
2149 /* Functions that do not return exactly once may not be sibcall
2150 optimized. */
2151 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2152 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2153 /* If the called function is nested in the current one, it might access
2154 some of the caller's arguments, but could clobber them beforehand if
2155 the argument areas are shared. */
2156 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2157 /* If this function requires more stack slots than the current
2158 function, we cannot change it into a sibling call.
2159 current_function_pretend_args_size is not part of the
2160 stack allocated by our caller. */
2161 || args_size.constant > (current_function_args_size
2162 - current_function_pretend_args_size)
2163 /* If the callee pops its own arguments, then it must pop exactly
2164 the same number of arguments as the current function. */
2165 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2166 != RETURN_POPS_ARGS (current_function_decl,
2167 TREE_TYPE (current_function_decl),
2168 current_function_args_size))
2169 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2170 try_tail_call = 0;
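Two of the checks above, seen at the source level (a sketch with hypothetical callees): a tail-position call with a scalar result is a sibcall candidate, while one whose value lands in a caller-provided structure slot trips the structure_value_addr test and is rejected.

struct big { int v[8]; };

extern int scalar_tail (int);
extern struct big struct_tail (void);

int
ok_for_sibcall (int x)
{
  return scalar_tail (x + 1);   /* candidate: may become a jump */
}

struct big
not_ok_for_sibcall (void)
{
  return struct_tail ();        /* structure_value_addr != NULL_RTX */
}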
2172 /* Ensure current function's preferred stack boundary is at least
2173 what we need. We don't have to increase alignment for recursive
2174 functions. */
2175 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2176 && fndecl != current_function_decl)
2177 cfun->preferred_stack_boundary = preferred_stack_boundary;
2178 if (fndecl == current_function_decl)
2179 cfun->recursive_call_emit = true;
2181 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2183 /* We want to make two insn chains; one for a sibling call, the other
2184 for a normal call. We will select one of the two chains after
2185 initial RTL generation is complete. */
2186 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2188 int sibcall_failure = 0;
2189 /* We want to emit any pending stack adjustments before the tail
2190 recursion "call". That way we know any adjustment after the tail
2191 recursion call can be ignored if we indeed use the tail
2192 call expansion. */
2193 int save_pending_stack_adjust = 0;
2194 int save_stack_pointer_delta = 0;
2195 rtx insns;
2196 rtx before_call, next_arg_reg;
2198 if (pass == 0)
2200 /* State variables we need to save and restore between
2201 iterations. */
2202 save_pending_stack_adjust = pending_stack_adjust;
2203 save_stack_pointer_delta = stack_pointer_delta;
2205 if (pass)
2206 flags &= ~ECF_SIBCALL;
2207 else
2208 flags |= ECF_SIBCALL;
2210 /* Other state variables that we must reinitialize each time
2211 through the loop (that are not initialized by the loop itself). */
2212 argblock = 0;
2213 call_fusage = 0;
2215 /* Start a new sequence for the normal call case.
2217 From this point on, if the sibling call fails, we want to set
2218 sibcall_failure instead of continuing the loop. */
2219 start_sequence ();
2221 /* Don't let pending stack adjusts add up to too much.
2222 Also, do all pending adjustments now if there is any chance
2223 this might be a call to alloca or if we are expanding a sibling
2224 call sequence or if we are calling a function that is to return
2225 with stack pointer depressed.
2226 Also do the adjustments before a throwing call, otherwise
2227 exception handling can fail; PR 19225. */
2228 if (pending_stack_adjust >= 32
2229 || (pending_stack_adjust > 0
2230 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2231 || (pending_stack_adjust > 0
2232 && flag_exceptions && !(flags & ECF_NOTHROW))
2233 || pass == 0)
2234 do_pending_stack_adjust ();
2236 /* When calling a const function, we must pop the stack args right away,
2237 so that the pop is deleted or moved with the call. */
2238 if (pass && (flags & ECF_LIBCALL_BLOCK))
2239 NO_DEFER_POP;
2241 /* Precompute any arguments as needed. */
2242 if (pass)
2243 precompute_arguments (flags, num_actuals, args);
2245 /* Now we are about to start emitting insns that can be deleted
2246 if a libcall is deleted. */
2247 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2248 start_sequence ();
2250 adjusted_args_size = args_size;
2251 /* Compute the actual size of the argument block required. The variable
2252 and constant sizes must be combined, the size may have to be rounded,
2253 and there may be a minimum required size. When generating a sibcall
2254 pattern, do not round up, since we'll be re-using whatever space our
2255 caller provided. */
2256 unadjusted_args_size
2257 = compute_argument_block_size (reg_parm_stack_space,
2258 &adjusted_args_size,
2259 (pass == 0 ? 0
2260 : preferred_stack_boundary));
2262 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2264 /* The argument block when performing a sibling call is the
2265 incoming argument block. */
2266 if (pass == 0)
2268 argblock = virtual_incoming_args_rtx;
2269 argblock
2270 #ifdef STACK_GROWS_DOWNWARD
2271 = plus_constant (argblock, current_function_pretend_args_size);
2272 #else
2273 = plus_constant (argblock, -current_function_pretend_args_size);
2274 #endif
2275 stored_args_map = sbitmap_alloc (args_size.constant);
2276 sbitmap_zero (stored_args_map);
2279 /* If we have no actual push instructions, or shouldn't use them,
2280 make space for all args right now. */
2281 else if (adjusted_args_size.var != 0)
2283 if (old_stack_level == 0)
2285 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2286 old_stack_pointer_delta = stack_pointer_delta;
2287 old_pending_adj = pending_stack_adjust;
2288 pending_stack_adjust = 0;
2289 /* stack_arg_under_construction says whether a stack arg is
2290 being constructed at the old stack level. Pushing the stack
2291 gets a clean outgoing argument block. */
2292 old_stack_arg_under_construction = stack_arg_under_construction;
2293 stack_arg_under_construction = 0;
2295 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2297 else
2299 /* Note that we must go through the motions of allocating an argument
2300 block even if the size is zero because we may be storing args
2301 in the area reserved for register arguments, which may be part of
2302 the stack frame. */
2304 int needed = adjusted_args_size.constant;
2306 /* Store the maximum argument space used. It will be pushed by
2307 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2308 checking). */
2310 if (needed > current_function_outgoing_args_size)
2311 current_function_outgoing_args_size = needed;
2313 if (must_preallocate)
2315 if (ACCUMULATE_OUTGOING_ARGS)
2317 /* Since the stack pointer will never be pushed, it is
2318 possible for the evaluation of a parm to clobber
2319 something we have already written to the stack.
2320 Since most function calls on RISC machines do not use
2321 the stack, this is uncommon, but must work correctly.
2323 Therefore, we save any area of the stack that was already
2324 written and that we are using. Here we set up to do this
2325 by making a new stack usage map from the old one. The
2326 actual save will be done by store_one_arg.
2328 Another approach might be to try to reorder the argument
2329 evaluations to avoid this conflicting stack usage. */
2331 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2332 /* Since we will be writing into the entire argument area,
2333 the map must be allocated for its entire size, not just
2334 the part that is the responsibility of the caller. */
2335 needed += reg_parm_stack_space;
2336 #endif
2338 #ifdef ARGS_GROW_DOWNWARD
2339 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2340 needed + 1);
2341 #else
2342 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2343 needed);
2344 #endif
2345 if (stack_usage_map_buf)
2346 free (stack_usage_map_buf);
2347 stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
2348 stack_usage_map = stack_usage_map_buf;
2350 if (initial_highest_arg_in_use)
2351 memcpy (stack_usage_map, initial_stack_usage_map,
2352 initial_highest_arg_in_use);
2354 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2355 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2356 (highest_outgoing_arg_in_use
2357 - initial_highest_arg_in_use));
2358 needed = 0;
2360 /* The address of the outgoing argument list must not be
2361 copied to a register here, because argblock would be left
2362 pointing to the wrong place after the call to
2363 allocate_dynamic_stack_space below. */
2365 argblock = virtual_outgoing_args_rtx;
2367 else
2369 if (inhibit_defer_pop == 0)
2371 /* Try to reuse some or all of the pending_stack_adjust
2372 to get this space. */
2373 needed
2374 = (combine_pending_stack_adjustment_and_call
2375 (unadjusted_args_size,
2376 &adjusted_args_size,
2377 preferred_unit_stack_boundary));
2379 /* combine_pending_stack_adjustment_and_call computes
2380 an adjustment before the arguments are allocated.
2381 Account for them and see whether or not the stack
2382 needs to go up or down. */
2383 needed = unadjusted_args_size - needed;
2385 if (needed < 0)
2387 /* We're releasing stack space. */
2388 /* ??? We can avoid any adjustment at all if we're
2389 already aligned. FIXME. */
2390 pending_stack_adjust = -needed;
2391 do_pending_stack_adjust ();
2392 needed = 0;
2394 else
2395 /* We need to allocate space. We'll do that in
2396 push_block below. */
2397 pending_stack_adjust = 0;
2400 /* Special case this because overhead of `push_block' in
2401 this case is non-trivial. */
2402 if (needed == 0)
2403 argblock = virtual_outgoing_args_rtx;
2404 else
2406 argblock = push_block (GEN_INT (needed), 0, 0);
2407 #ifdef ARGS_GROW_DOWNWARD
2408 argblock = plus_constant (argblock, needed);
2409 #endif
2412 /* We only really need to call `copy_to_reg' in the case
2413 where push insns are going to be used to pass ARGBLOCK
2414 to a function call in ARGS. In that case, the stack
2415 pointer changes value from the allocation point to the
2416 call point, and hence the value of
2417 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2418 as well always do it. */
2419 argblock = copy_to_reg (argblock);
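The usage-map bookkeeping above, reduced to a stand-alone sketch (the helper is illustrative, not a calls.c function): one byte of map per byte of the outgoing argument area, old contents preserved, new tail cleared.

#include <stdlib.h>
#include <string.h>
#include <stdio.h>

static char *
grow_usage_map_sketch (const char *old_map, int old_high, int new_high)
{
  char *map = malloc (new_high > 0 ? new_high : 1);
  if (map == NULL)
    abort ();
  if (old_high > 0)
    memcpy (map, old_map, old_high);                  /* keep what was in use */
  if (new_high > old_high)
    memset (map + old_high, 0, new_high - old_high);  /* clear the rest */
  return map;
}

int
main (void)
{
  char first[4] = { 1, 1, 0, 1 };
  char *map = grow_usage_map_sketch (first, 4, 8);
  printf ("%d %d\n", map[3], map[7]);   /* 1 0 */
  free (map);
  return 0;
}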
2424 if (ACCUMULATE_OUTGOING_ARGS)
2426 /* The save/restore code in store_one_arg handles all
2427 cases except one: a constructor call (including a C
2428 function returning a BLKmode struct) to initialize
2429 an argument. */
2430 if (stack_arg_under_construction)
2432 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2433 rtx push_size = GEN_INT (reg_parm_stack_space
2434 + adjusted_args_size.constant);
2435 #else
2436 rtx push_size = GEN_INT (adjusted_args_size.constant);
2437 #endif
2438 if (old_stack_level == 0)
2440 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2441 NULL_RTX);
2442 old_stack_pointer_delta = stack_pointer_delta;
2443 old_pending_adj = pending_stack_adjust;
2444 pending_stack_adjust = 0;
2445 /* stack_arg_under_construction says whether a stack
2446 arg is being constructed at the old stack level.
2447 Pushing the stack gets a clean outgoing argument
2448 block. */
2449 old_stack_arg_under_construction
2450 = stack_arg_under_construction;
2451 stack_arg_under_construction = 0;
2452 /* Make a new map for the new argument list. */
2453 if (stack_usage_map_buf)
2454 free (stack_usage_map_buf);
2455 stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
2456 stack_usage_map = stack_usage_map_buf;
2457 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2458 highest_outgoing_arg_in_use = 0;
2460 allocate_dynamic_stack_space (push_size, NULL_RTX,
2461 BITS_PER_UNIT);
2464 /* If argument evaluation might modify the stack pointer,
2465 copy the address of the argument list to a register. */
2466 for (i = 0; i < num_actuals; i++)
2467 if (args[i].pass_on_stack)
2469 argblock = copy_addr_to_reg (argblock);
2470 break;
2474 compute_argument_addresses (args, argblock, num_actuals);
2476 /* If we push args individually in reverse order, perform stack alignment
2477 before the first push (the last arg). */
2478 if (PUSH_ARGS_REVERSED && argblock == 0
2479 && adjusted_args_size.constant != unadjusted_args_size)
2481 /* When the stack adjustment is pending, we get better code
2482 by combining the adjustments. */
2483 if (pending_stack_adjust
2484 && ! (flags & ECF_LIBCALL_BLOCK)
2485 && ! inhibit_defer_pop)
2487 pending_stack_adjust
2488 = (combine_pending_stack_adjustment_and_call
2489 (unadjusted_args_size,
2490 &adjusted_args_size,
2491 preferred_unit_stack_boundary));
2492 do_pending_stack_adjust ();
2494 else if (argblock == 0)
2495 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2496 - unadjusted_args_size));
2498 /* Now that the stack is properly aligned, pops can't safely
2499 be deferred during the evaluation of the arguments. */
2500 NO_DEFER_POP;
2502 funexp = rtx_for_function_call (fndecl, addr);
2504 /* Figure out the register where the value, if any, will come back. */
2505 valreg = 0;
2506 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2507 && ! structure_value_addr)
2509 if (pcc_struct_value)
2510 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2511 fndecl, NULL, (pass == 0));
2512 else
2513 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2514 (pass == 0));
2517 /* Precompute all register parameters. It isn't safe to compute anything
2518 once we have started filling any specific hard regs. */
2519 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2521 if (TREE_OPERAND (exp, 2))
2522 static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
2523 NULL_RTX, VOIDmode, 0);
2524 else
2525 static_chain_value = 0;
2527 #ifdef REG_PARM_STACK_SPACE
2528 /* Save the fixed argument area if it's part of the caller's frame and
2529 is clobbered by argument setup for this call. */
2530 if (ACCUMULATE_OUTGOING_ARGS && pass)
2531 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2532 &low_to_save, &high_to_save);
2533 #endif
2535 /* Now store (and compute if necessary) all non-register parms.
2536 These come before register parms, since they can require block-moves,
2537 which could clobber the registers used for register parms.
2538 Parms which have partial registers are not stored here,
2539 but we do preallocate space here if they want that. */
2541 for (i = 0; i < num_actuals; i++)
2542 if (args[i].reg == 0 || args[i].pass_on_stack)
2544 rtx before_arg = get_last_insn ();
2546 if (store_one_arg (&args[i], argblock, flags,
2547 adjusted_args_size.var != 0,
2548 reg_parm_stack_space)
2549 || (pass == 0
2550 && check_sibcall_argument_overlap (before_arg,
2551 &args[i], 1)))
2552 sibcall_failure = 1;
2554 if (flags & ECF_CONST
2555 && args[i].stack
2556 && args[i].value == args[i].stack)
2557 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2558 gen_rtx_USE (VOIDmode,
2559 args[i].value),
2560 call_fusage);
2563 /* If we have a parm that is passed in registers but not in memory
2564 and whose alignment does not permit a direct copy into registers,
2565 make a group of pseudos that correspond to each register that we
2566 will later fill. */
2567 if (STRICT_ALIGNMENT)
2568 store_unaligned_arguments_into_pseudos (args, num_actuals);
2570 /* Now store any partially-in-registers parm.
2571 This is the last place a block-move can happen. */
2572 if (reg_parm_seen)
2573 for (i = 0; i < num_actuals; i++)
2574 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2576 rtx before_arg = get_last_insn ();
2578 if (store_one_arg (&args[i], argblock, flags,
2579 adjusted_args_size.var != 0,
2580 reg_parm_stack_space)
2581 || (pass == 0
2582 && check_sibcall_argument_overlap (before_arg,
2583 &args[i], 1)))
2584 sibcall_failure = 1;
2587 /* If we pushed args in forward order, perform stack alignment
2588 after pushing the last arg. */
2589 if (!PUSH_ARGS_REVERSED && argblock == 0)
2590 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2591 - unadjusted_args_size));
2593 /* If register arguments require space on the stack and stack space
2594 was not preallocated, allocate stack space here for arguments
2595 passed in registers. */
2596 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2597 if (!ACCUMULATE_OUTGOING_ARGS
2598 && must_preallocate == 0 && reg_parm_stack_space > 0)
2599 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2600 #endif
2602 /* Pass the function the address in which to return a
2603 structure value. */
2604 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2606 structure_value_addr
2607 = convert_memory_address (Pmode, structure_value_addr);
2608 emit_move_insn (struct_value,
2609 force_reg (Pmode,
2610 force_operand (structure_value_addr,
2611 NULL_RTX)));
2613 if (REG_P (struct_value))
2614 use_reg (&call_fusage, struct_value);
2617 funexp = prepare_call_address (funexp, static_chain_value,
2618 &call_fusage, reg_parm_seen, pass == 0);
2620 load_register_parameters (args, num_actuals, &call_fusage, flags,
2621 pass == 0, &sibcall_failure);
2623 /* Save a pointer to the last insn before the call, so that we can
2624 later safely search backwards to find the CALL_INSN. */
2625 before_call = get_last_insn ();
2627 /* Set up next argument register. For sibling calls on machines
2628 with register windows this should be the incoming register. */
2629 #ifdef FUNCTION_INCOMING_ARG
2630 if (pass == 0)
2631 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2632 void_type_node, 1);
2633 else
2634 #endif
2635 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2636 void_type_node, 1);
2638 /* All arguments and registers used for the call must be set up by
2639 now! */
2641 /* Stack must be properly aligned now. */
2642 gcc_assert (!pass
2643 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2645 /* Generate the actual call instruction. */
2646 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2647 adjusted_args_size.constant, struct_value_size,
2648 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2649 flags, & args_so_far);
2651 /* If a non-BLKmode value is returned at the most significant end
2652 of a register, shift the register right by the appropriate amount
2653 and update VALREG accordingly. BLKmode values are handled by the
2654 group load/store machinery below. */
2655 if (!structure_value_addr
2656 && !pcc_struct_value
2657 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2658 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2660 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2661 sibcall_failure = 1;
2662 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2665 /* If the call is cse'able, make an appropriate pair of reg-notes around it.
2666 Test valreg so we don't crash; may safely ignore `const'
2667 if return type is void. Disable for PARALLEL return values, because
2668 we have no way to move such values into a pseudo register. */
2669 if (pass && (flags & ECF_LIBCALL_BLOCK))
2671 rtx insns;
2672 rtx insn;
2673 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2675 insns = get_insns ();
2677 /* Expansion of block moves possibly introduced a loop that may
2678 not appear inside a libcall block. */
2679 for (insn = insns; insn; insn = NEXT_INSN (insn))
2680 if (JUMP_P (insn))
2681 failed = true;
2683 if (failed)
2685 end_sequence ();
2686 emit_insn (insns);
2688 else
2690 rtx note = 0;
2691 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2693 /* Mark the return value as a pointer if needed. */
2694 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2695 mark_reg_pointer (temp,
2696 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2698 end_sequence ();
2699 if (flag_unsafe_math_optimizations
2700 && fndecl
2701 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2702 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2703 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2704 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2705 note = gen_rtx_fmt_e (SQRT,
2706 GET_MODE (temp),
2707 args[0].initial_value);
2708 else
2710 /* Construct an "equal form" for the value which
2711 mentions all the arguments in order as well as
2712 the function name. */
2713 for (i = 0; i < num_actuals; i++)
2714 note = gen_rtx_EXPR_LIST (VOIDmode,
2715 args[i].initial_value, note);
2716 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2718 if (flags & ECF_PURE)
2719 note = gen_rtx_EXPR_LIST (VOIDmode,
2720 gen_rtx_USE (VOIDmode,
2721 gen_rtx_MEM (BLKmode,
2722 gen_rtx_SCRATCH (VOIDmode))),
2723 note);
2725 emit_libcall_block (insns, temp, valreg, note);
2727 valreg = temp;
2730 else if (pass && (flags & ECF_MALLOC))
2732 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2733 rtx last, insns;
2735 /* The return value from a malloc-like function is a pointer. */
2736 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2737 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2739 emit_move_insn (temp, valreg);
2741 /* The return value from a malloc-like function can not alias
2742 anything else. */
2743 last = get_last_insn ();
2744 REG_NOTES (last) =
2745 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2747 /* Write out the sequence. */
2748 insns = get_insns ();
2749 end_sequence ();
2750 emit_insn (insns);
2751 valreg = temp;
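What the REG_NOALIAS note above encodes, seen from the source (a sketch; my_alloc is a hypothetical wrapper): a malloc-like result aliases nothing that existed before the call, so stores through it cannot disturb other memory.

#include <stdio.h>
#include <stdlib.h>

static void *my_alloc (size_t) __attribute__ ((malloc));

/* A malloc-like wrapper: its result aliases nothing pre-existing.  */
static void *
my_alloc (size_t n)
{
  return malloc (n);
}

static int
no_alias (int *p)
{
  int *q = my_alloc (sizeof *q);   /* sketch only: q is never freed */
  *p = 1;
  *q = 2;
  return *p;   /* may be folded to 1: *q cannot alias *p */
}

int
main (void)
{
  int x;
  printf ("%d\n", no_alias (&x));
  return 0;
}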
2754 /* For calls to `setjmp', etc., inform flow.c it should complain
2755 if nonvolatile values are live. For functions that cannot return,
2756 inform flow that control does not fall through. */
2758 if ((flags & ECF_NORETURN) || pass == 0)
2760 /* The barrier must be emitted
2761 immediately after the CALL_INSN. Some ports emit more
2762 than just a CALL_INSN above, so we must search for it here. */
2764 rtx last = get_last_insn ();
2765 while (!CALL_P (last))
2767 last = PREV_INSN (last);
2768 /* There was no CALL_INSN? */
2769 gcc_assert (last != before_call);
2772 emit_barrier_after (last);
2774 /* Stack adjustments after a noreturn call are dead code.
2775 However when NO_DEFER_POP is in effect, we must preserve
2776 stack_pointer_delta. */
2777 if (inhibit_defer_pop == 0)
2779 stack_pointer_delta = old_stack_allocated;
2780 pending_stack_adjust = 0;
2784 /* If value type not void, return an rtx for the value. */
2786 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2787 || ignore)
2788 target = const0_rtx;
2789 else if (structure_value_addr)
2791 if (target == 0 || !MEM_P (target))
2793 target
2794 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2795 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2796 structure_value_addr));
2797 set_mem_attributes (target, exp, 1);
2800 else if (pcc_struct_value)
2802 /* This is the special C++ case where we need to
2803 know what the true target was. We take care to
2804 never use this value more than once in one expression. */
2805 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2806 copy_to_reg (valreg));
2807 set_mem_attributes (target, exp, 1);
2809 /* Handle calls that return values in multiple non-contiguous locations.
2810 The Irix 6 ABI has examples of this. */
2811 else if (GET_CODE (valreg) == PARALLEL)
2813 if (target == 0)
2815 /* This will only be assigned once, so it can be readonly. */
2816 tree nt = build_qualified_type (TREE_TYPE (exp),
2817 (TYPE_QUALS (TREE_TYPE (exp))
2818 | TYPE_QUAL_CONST));
2820 target = assign_temp (nt, 0, 1, 1);
2823 if (! rtx_equal_p (target, valreg))
2824 emit_group_store (target, valreg, TREE_TYPE (exp),
2825 int_size_in_bytes (TREE_TYPE (exp)));
2827 /* We can not support sibling calls for this case. */
2828 sibcall_failure = 1;
2830 else if (target
2831 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2832 && GET_MODE (target) == GET_MODE (valreg))
2834 /* TARGET and VALREG cannot be equal at this point because the
2835 latter would not have REG_FUNCTION_VALUE_P true, while the
2836 former would if it were referring to the same register.
2838 If they refer to the same register, this move will be a no-op,
2839 except when function inlining is being done. */
2840 emit_move_insn (target, valreg);
2842 /* If we are setting a MEM, this code must be executed. Since it is
2843 emitted after the call insn, sibcall optimization cannot be
2844 performed in that case. */
2845 if (MEM_P (target))
2846 sibcall_failure = 1;
2848 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2850 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2852 /* We can not support sibling calls for this case. */
2853 sibcall_failure = 1;
2855 else
2856 target = copy_to_reg (valreg);
2858 if (targetm.calls.promote_function_return(funtype))
2860 /* If we promoted this return value, make the proper SUBREG.
2861 TARGET might be const0_rtx here, so be careful. */
2862 if (REG_P (target)
2863 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2864 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2866 tree type = TREE_TYPE (exp);
2867 int unsignedp = TYPE_UNSIGNED (type);
2868 int offset = 0;
2869 enum machine_mode pmode;
2871 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
2872 /* If we don't promote as expected, something is wrong. */
2873 gcc_assert (GET_MODE (target) == pmode);
2875 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
2876 && (GET_MODE_SIZE (GET_MODE (target))
2877 > GET_MODE_SIZE (TYPE_MODE (type))))
2879 offset = GET_MODE_SIZE (GET_MODE (target))
2880 - GET_MODE_SIZE (TYPE_MODE (type));
2881 if (! BYTES_BIG_ENDIAN)
2882 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
2883 else if (! WORDS_BIG_ENDIAN)
2884 offset %= UNITS_PER_WORD;
2886 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
2887 SUBREG_PROMOTED_VAR_P (target) = 1;
2888 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
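A worked instance of the SUBREG byte offset above (plain-integer sketch mirroring the branch logic): reading a 4-byte SImode value out of an 8-byte promoted register gives offset 0 on a little-endian target and 8 - 4 = 4 on a fully big-endian one.

#include <stdio.h>

static int
subreg_offset_sketch (int reg_size, int type_size,
                      int words_big_endian, int bytes_big_endian,
                      int units_per_word)
{
  int offset = 0;
  if ((words_big_endian || bytes_big_endian) && reg_size > type_size)
    {
      offset = reg_size - type_size;
      if (!bytes_big_endian)
        offset = (offset / units_per_word) * units_per_word;
      else if (!words_big_endian)
        offset %= units_per_word;
    }
  return offset;
}

int
main (void)
{
  printf ("%d\n", subreg_offset_sketch (8, 4, 1, 1, 8));   /* 4 */
  printf ("%d\n", subreg_offset_sketch (8, 4, 0, 0, 8));   /* 0 */
  return 0;
}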
2892 /* If size of args is variable or this was a constructor call for a stack
2893 argument, restore saved stack-pointer value. */
2895 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
2897 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2898 stack_pointer_delta = old_stack_pointer_delta;
2899 pending_stack_adjust = old_pending_adj;
2900 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2901 stack_arg_under_construction = old_stack_arg_under_construction;
2902 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2903 stack_usage_map = initial_stack_usage_map;
2904 sibcall_failure = 1;
2906 else if (ACCUMULATE_OUTGOING_ARGS && pass)
2908 #ifdef REG_PARM_STACK_SPACE
2909 if (save_area)
2910 restore_fixed_argument_area (save_area, argblock,
2911 high_to_save, low_to_save);
2912 #endif
2914 /* If we saved any argument areas, restore them. */
2915 for (i = 0; i < num_actuals; i++)
2916 if (args[i].save_area)
2918 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2919 rtx stack_area
2920 = gen_rtx_MEM (save_mode,
2921 memory_address (save_mode,
2922 XEXP (args[i].stack_slot, 0)));
2924 if (save_mode != BLKmode)
2925 emit_move_insn (stack_area, args[i].save_area);
2926 else
2927 emit_block_move (stack_area, args[i].save_area,
2928 GEN_INT (args[i].locate.size.constant),
2929 BLOCK_OP_CALL_PARM);
2932 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2933 stack_usage_map = initial_stack_usage_map;
2936 /* If this was alloca, record the new stack level for nonlocal gotos.
2937 Check for the handler slots since we might not have a save area
2938 for non-local gotos. */
2940 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
2941 update_nonlocal_goto_save_area ();
2943 /* Free up storage we no longer need. */
2944 for (i = 0; i < num_actuals; ++i)
2945 if (args[i].aligned_regs)
2946 free (args[i].aligned_regs);
2948 insns = get_insns ();
2949 end_sequence ();
2951 if (pass == 0)
2953 tail_call_insns = insns;
2955 /* Restore the pending stack adjustment now that we have
2956 finished generating the sibling call sequence. */
2958 pending_stack_adjust = save_pending_stack_adjust;
2959 stack_pointer_delta = save_stack_pointer_delta;
2961 /* Prepare arg structure for next iteration. */
2962 for (i = 0; i < num_actuals; i++)
2964 args[i].value = 0;
2965 args[i].aligned_regs = 0;
2966 args[i].stack = 0;
2969 sbitmap_free (stored_args_map);
2971 else
2973 normal_call_insns = insns;
2975 /* Verify that we've deallocated all the stack we used. */
2976 gcc_assert ((flags & ECF_NORETURN)
2977 || (old_stack_allocated
2978 == stack_pointer_delta - pending_stack_adjust));
2981 /* If something prevents making this a sibling call,
2982 zero out the sequence. */
2983 if (sibcall_failure)
2984 tail_call_insns = NULL_RTX;
2985 else
2986 break;
2989 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
2990 arguments too, as the argument area is now clobbered by the call. */
2991 if (tail_call_insns)
2993 emit_insn (tail_call_insns);
2994 cfun->tail_call_emit = true;
2996 else
2997 emit_insn (normal_call_insns);
2999 currently_expanding_call--;
3001 /* If this function returns with the stack pointer depressed, ensure
3002 this block saves and restores the stack pointer, show it was
3003 changed, and adjust for any outgoing arg space. */
3004 if (flags & ECF_SP_DEPRESSED)
3006 clear_pending_stack_adjust ();
3007 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3008 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3011 if (stack_usage_map_buf)
3012 free (stack_usage_map_buf);
3014 return target;
3017 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3018 this function's incoming arguments.
3020 At the start of RTL generation we know the only REG_EQUIV notes
3021 in the rtl chain are those for incoming arguments, so we can look
3022 for REG_EQUIV notes between the start of the function and the
3023 NOTE_INSN_FUNCTION_BEG.
3025 This is (slight) overkill. We could keep track of the highest
3026 argument we clobber and be more selective in removing notes, but it
3027 does not seem to be worth the effort. */
3029 void
3030 fixup_tail_calls (void)
3032 rtx insn;
3034 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3036 /* There are never REG_EQUIV notes for the incoming arguments
3037 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3038 if (NOTE_P (insn)
3039 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
3040 break;
3042 while (1)
3044 rtx note = find_reg_note (insn, REG_EQUIV, 0);
3045 if (note)
3047 /* Remove the note and keep looking at the notes for
3048 this insn. */
3049 remove_note (insn, note);
3050 continue;
3052 break;
3057 /* Traverse an argument list in VALUES and expand all complex
3058 arguments into their components. */
3059 static tree
3060 split_complex_values (tree values)
3062 tree p;
3064 /* Before allocating memory, check for the common case of no complex. */
3065 for (p = values; p; p = TREE_CHAIN (p))
3067 tree type = TREE_TYPE (TREE_VALUE (p));
3068 if (type && TREE_CODE (type) == COMPLEX_TYPE
3069 && targetm.calls.split_complex_arg (type))
3070 goto found;
3072 return values;
3074 found:
3075 values = copy_list (values);
3077 for (p = values; p; p = TREE_CHAIN (p))
3079 tree complex_value = TREE_VALUE (p);
3080 tree complex_type;
3082 complex_type = TREE_TYPE (complex_value);
3083 if (!complex_type)
3084 continue;
3086 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3087 && targetm.calls.split_complex_arg (complex_type))
3089 tree subtype;
3090 tree real, imag, next;
3092 subtype = TREE_TYPE (complex_type);
3093 complex_value = save_expr (complex_value);
3094 real = build1 (REALPART_EXPR, subtype, complex_value);
3095 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3097 TREE_VALUE (p) = real;
3098 next = TREE_CHAIN (p);
3099 imag = build_tree_list (NULL_TREE, imag);
3100 TREE_CHAIN (p) = imag;
3101 TREE_CHAIN (imag) = next;
3103 /* Skip the newly created node. */
3104 p = TREE_CHAIN (p);
3108 return values;
3111 /* Traverse a list of TYPES and expand all complex types into their
3112 components. */
3113 static tree
3114 split_complex_types (tree types)
3116 tree p;
3118 /* Before allocating memory, check for the common case of no complex. */
3119 for (p = types; p; p = TREE_CHAIN (p))
3121 tree type = TREE_VALUE (p);
3122 if (TREE_CODE (type) == COMPLEX_TYPE
3123 && targetm.calls.split_complex_arg (type))
3124 goto found;
3126 return types;
3128 found:
3129 types = copy_list (types);
3131 for (p = types; p; p = TREE_CHAIN (p))
3133 tree complex_type = TREE_VALUE (p);
3135 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3136 && targetm.calls.split_complex_arg (complex_type))
3138 tree next, imag;
3140 /* Rewrite complex type with component type. */
3141 TREE_VALUE (p) = TREE_TYPE (complex_type);
3142 next = TREE_CHAIN (p);
3144 /* Add another component type for the imaginary part. */
3145 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3146 TREE_CHAIN (p) = imag;
3147 TREE_CHAIN (imag) = next;
3149 /* Skip the newly created node. */
3150 p = TREE_CHAIN (p);
3154 return types;
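The two rewrites above, seen from the source (a runnable sketch; f_split is a hypothetical stand-in): a _Complex argument and its slot in the prototype are both split so the real and imaginary parts travel as two scalar arguments.

#include <complex.h>
#include <stdio.h>

/* What `void f (double _Complex z)' is conceptually rewritten into
   when split_complex_arg says so: */
static void
f_split (double z_real, double z_imag)
{
  printf ("%g + %gi\n", z_real, z_imag);
}

int
main (void)
{
  double _Complex z = 1.0 + 2.0 * I;
  f_split (creal (z), cimag (z));   /* ... and `f (z)' becomes this */
  return 0;
}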
3157 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3158 The RETVAL parameter specifies whether the return value needs to be saved;
3159 the other parameters are documented in the emit_library_call function below. */
3161 static rtx
3162 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3163 enum libcall_type fn_type,
3164 enum machine_mode outmode, int nargs, va_list p)
3166 /* Total size in bytes of all the stack-parms scanned so far. */
3167 struct args_size args_size;
3168 /* Size of arguments before any adjustments (such as rounding). */
3169 struct args_size original_args_size;
3170 int argnum;
3171 rtx fun;
3172 int inc;
3173 int count;
3174 rtx argblock = 0;
3175 CUMULATIVE_ARGS args_so_far;
3176 struct arg
3178 rtx value;
3179 enum machine_mode mode;
3180 rtx reg;
3181 int partial;
3182 struct locate_and_pad_arg_data locate;
3183 rtx save_area;
3185 struct arg *argvec;
3186 int old_inhibit_defer_pop = inhibit_defer_pop;
3187 rtx call_fusage = 0;
3188 rtx mem_value = 0;
3189 rtx valreg;
3190 int pcc_struct_value = 0;
3191 int struct_value_size = 0;
3192 int flags;
3193 int reg_parm_stack_space = 0;
3194 int needed;
3195 rtx before_call;
3196 tree tfom; /* type_for_mode (outmode, 0) */
3198 #ifdef REG_PARM_STACK_SPACE
3199 /* Define the boundary of the register parm stack space that needs to be
3200 saved, if any. */
3201 int low_to_save, high_to_save;
3202 rtx save_area = 0; /* Place that it is saved. */
3203 #endif
3205 /* Stack usage state, saved here so it can be restored on exit. */
3206 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3207 char *initial_stack_usage_map = stack_usage_map;
3208 char *stack_usage_map_buf = NULL;
3210 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3212 #ifdef REG_PARM_STACK_SPACE
3213 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3214 #endif
3216 /* By default, library functions can not throw. */
3217 flags = ECF_NOTHROW;
3219 switch (fn_type)
3221 case LCT_NORMAL:
3222 break;
3223 case LCT_CONST:
3224 flags |= ECF_CONST;
3225 break;
3226 case LCT_PURE:
3227 flags |= ECF_PURE;
3228 break;
3229 case LCT_CONST_MAKE_BLOCK:
3230 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3231 break;
3232 case LCT_PURE_MAKE_BLOCK:
3233 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3234 break;
3235 case LCT_NORETURN:
3236 flags |= ECF_NORETURN;
3237 break;
3238 case LCT_THROW:
3239 flags = ECF_NORETURN;
3240 break;
3241 case LCT_RETURNS_TWICE:
3242 flags = ECF_RETURNS_TWICE;
3243 break;
3245 fun = orgfun;
3247 /* Ensure current function's preferred stack boundary is at least
3248 what we need. */
3249 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3250 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3252 /* If this kind of value comes back in memory,
3253 decide where in memory it should come back. */
3254 if (outmode != VOIDmode)
3256 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3257 if (aggregate_value_p (tfom, 0))
3259 #ifdef PCC_STATIC_STRUCT_RETURN
3260 rtx pointer_reg
3261 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3262 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3263 pcc_struct_value = 1;
3264 if (value == 0)
3265 value = gen_reg_rtx (outmode);
3266 #else /* not PCC_STATIC_STRUCT_RETURN */
3267 struct_value_size = GET_MODE_SIZE (outmode);
3268 if (value != 0 && MEM_P (value))
3269 mem_value = value;
3270 else
3271 mem_value = assign_temp (tfom, 0, 1, 1);
3272 #endif
3273 /* This call returns a big structure. */
3274 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3277 else
3278 tfom = void_type_node;
3280 /* ??? Unfinished: must pass the memory address as an argument. */
3282 /* Copy all the libcall-arguments out of the varargs data
3283 and into a vector ARGVEC.
3285 Compute how to pass each argument. We only support a very small subset
3286 of the full argument passing conventions to limit complexity here since
3287 library functions shouldn't have many args. */
3289 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3290 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3292 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3293 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3294 #else
3295 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3296 #endif
3298 args_size.constant = 0;
3299 args_size.var = 0;
3301 count = 0;
3303 /* Now we are about to start emitting insns that can be deleted
3304 if a libcall is deleted. */
3305 if (flags & ECF_LIBCALL_BLOCK)
3306 start_sequence ();
3308 push_temp_slots ();
3310 /* If there's a structure value address to be passed,
3311 either pass it in the special place, or pass it as an extra argument. */
3312 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3314 rtx addr = XEXP (mem_value, 0);
3316 nargs++;
3318 /* Make sure it is a reasonable operand for a move or push insn. */
3319 if (!REG_P (addr) && !MEM_P (addr)
3320 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3321 addr = force_operand (addr, NULL_RTX);
3323 argvec[count].value = addr;
3324 argvec[count].mode = Pmode;
3325 argvec[count].partial = 0;
3327 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3328 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3329 NULL_TREE, 1) == 0);
3331 locate_and_pad_parm (Pmode, NULL_TREE,
3332 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3333 1,
3334 #else
3335 argvec[count].reg != 0,
3336 #endif
3337 0, NULL_TREE, &args_size, &argvec[count].locate);
3339 if (argvec[count].reg == 0 || argvec[count].partial != 0
3340 || reg_parm_stack_space > 0)
3341 args_size.constant += argvec[count].locate.size.constant;
3343 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3345 count++;
3348 for (; count < nargs; count++)
3350 rtx val = va_arg (p, rtx);
3351 enum machine_mode mode = va_arg (p, enum machine_mode);
3353 /* We cannot convert the arg value to the mode the library wants here;
3354 must do it earlier where we know the signedness of the arg. */
3355 gcc_assert (mode != BLKmode
3356 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3358 /* Make sure it is a reasonable operand for a move or push insn. */
3359 if (!REG_P (val) && !MEM_P (val)
3360 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3361 val = force_operand (val, NULL_RTX);
3363 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3365 rtx slot;
3366 int must_copy
3367 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3369 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3370 functions, so we have to pretend this isn't such a function. */
3371 if (flags & ECF_LIBCALL_BLOCK)
3373 rtx insns = get_insns ();
3374 end_sequence ();
3375 emit_insn (insns);
3377 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3379 /* If this was a CONST function, it is now PURE since
3380 it now reads memory. */
3381 if (flags & ECF_CONST)
3383 flags &= ~ECF_CONST;
3384 flags |= ECF_PURE;
3387 if (MEM_P (val) && !must_copy)
3388 slot = val;
3389 else
3391 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3392 0, 1, 1);
3393 emit_move_insn (slot, val);
3396 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3397 gen_rtx_USE (VOIDmode, slot),
3398 call_fusage);
3399 if (must_copy)
3400 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3401 gen_rtx_CLOBBER (VOIDmode,
3402 slot),
3403 call_fusage);
3405 mode = Pmode;
3406 val = force_operand (XEXP (slot, 0), NULL_RTX);
3409 argvec[count].value = val;
3410 argvec[count].mode = mode;
3412 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3414 argvec[count].partial
3415 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3417 locate_and_pad_parm (mode, NULL_TREE,
3418 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3419 1,
3420 #else
3421 argvec[count].reg != 0,
3422 #endif
3423 argvec[count].partial,
3424 NULL_TREE, &args_size, &argvec[count].locate);
3426 gcc_assert (!argvec[count].locate.size.var);
3428 if (argvec[count].reg == 0 || argvec[count].partial != 0
3429 || reg_parm_stack_space > 0)
3430 args_size.constant += argvec[count].locate.size.constant;
3432 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3435 /* If this machine requires an external definition for library
3436 functions, write one out. */
3437 assemble_external_libcall (fun);
3439 original_args_size = args_size;
3440 args_size.constant = (((args_size.constant
3441 + stack_pointer_delta
3442 + STACK_BYTES - 1)
3443 / STACK_BYTES
3444 * STACK_BYTES)
3445 - stack_pointer_delta);
3447 args_size.constant = MAX (args_size.constant,
3448 reg_parm_stack_space);
3450 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3451 args_size.constant -= reg_parm_stack_space;
3452 #endif
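A worked instance of the rounding just above (sketch; plain integers in place of args_size): the block is padded so that the bytes already on the stack plus the block remain a multiple of STACK_BYTES.

#include <stdio.h>

static int
round_args_sketch (int args, int sp_delta, int stack_bytes)
{
  return (args + sp_delta + stack_bytes - 1)
         / stack_bytes * stack_bytes
         - sp_delta;
}

int
main (void)
{
  /* 20 bytes of args with 8 already pushed, 16-byte STACK_BYTES:
     pad to 24 so that 8 + 24 = 32 stays aligned.  */
  printf ("%d\n", round_args_sketch (20, 8, 16));   /* 24 */
  return 0;
}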
3454 if (args_size.constant > current_function_outgoing_args_size)
3455 current_function_outgoing_args_size = args_size.constant;
3457 if (ACCUMULATE_OUTGOING_ARGS)
3459 /* Since the stack pointer will never be pushed, it is possible for
3460 the evaluation of a parm to clobber something we have already
3461 written to the stack. Since most function calls on RISC machines
3462 do not use the stack, this is uncommon, but must work correctly.
3464 Therefore, we save any area of the stack that was already written
3465 and that we are using. Here we set up to do this by making a new
3466 stack usage map from the old one.
3468 Another approach might be to try to reorder the argument
3469 evaluations to avoid this conflicting stack usage. */
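/* Editorial sketch (not in the original source): stack_usage_map is a
   byte-granular occupancy map of the outgoing argument area.  If bytes 0-3
   were written by an earlier argument,

     stack_usage_map: [1][1][1][1][0][0][0][0] ...

   then a later argument landing on bytes 2-5 overlaps a live slot; the
   scans further below ("while (... && stack_usage_map[i] == 0)") detect
   exactly this so the old contents can be saved first.  */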
3471 needed = args_size.constant;
3473 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3474 /* Since we will be writing into the entire argument area, the
3475 map must be allocated for its entire size, not just the part that
3476 is the responsibility of the caller. */
3477 needed += reg_parm_stack_space;
3478 #endif
3480 #ifdef ARGS_GROW_DOWNWARD
3481 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3482 needed + 1);
3483 #else
3484 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3485 needed);
3486 #endif
3487 stack_usage_map_buf = xmalloc (highest_outgoing_arg_in_use);
3488 stack_usage_map = stack_usage_map_buf;
3490 if (initial_highest_arg_in_use)
3491 memcpy (stack_usage_map, initial_stack_usage_map,
3492 initial_highest_arg_in_use);
3494 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3495 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3496 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3497 needed = 0;
3499 /* We must be careful to use virtual regs before they're instantiated,
3500 and real regs afterwards. Loop optimization, for example, can create
3501 new libcalls after we've instantiated the virtual regs, and if we
3502 use virtuals anyway, they won't match the rtl patterns. */
3504 if (virtuals_instantiated)
3505 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3506 else
3507 argblock = virtual_outgoing_args_rtx;
3509 else
3511 if (!PUSH_ARGS)
3512 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3515 /* If we push args individually in reverse order, perform stack alignment
3516 before the first push (the last arg). */
3517 if (argblock == 0 && PUSH_ARGS_REVERSED)
3518 anti_adjust_stack (GEN_INT (args_size.constant
3519 - original_args_size.constant));
3521 if (PUSH_ARGS_REVERSED)
3523 inc = -1;
3524 argnum = nargs - 1;
3526 else
3528 inc = 1;
3529 argnum = 0;
3532 #ifdef REG_PARM_STACK_SPACE
3533 if (ACCUMULATE_OUTGOING_ARGS)
3535 /* The argument list is the property of the called routine and it
3536 may clobber it. If the fixed area has been used for previous
3537 parameters, we must save and restore it. */
3538 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3539 &low_to_save, &high_to_save);
3541 #endif
3543 /* Push the args that need to be pushed. */
3545 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3546 are to be pushed. */
3547 for (count = 0; count < nargs; count++, argnum += inc)
3549 enum machine_mode mode = argvec[argnum].mode;
3550 rtx val = argvec[argnum].value;
3551 rtx reg = argvec[argnum].reg;
3552 int partial = argvec[argnum].partial;
3553 int lower_bound = 0, upper_bound = 0, i;
3555 if (! (reg != 0 && partial == 0))
3557 if (ACCUMULATE_OUTGOING_ARGS)
3559 /* If this is being stored into a pre-allocated, fixed-size,
3560 stack area, save any previous data at that location. */
3562 #ifdef ARGS_GROW_DOWNWARD
3563 /* stack_slot is negative, but we want to index stack_usage_map
3564 with positive values. */
3565 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3566 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3567 #else
3568 lower_bound = argvec[argnum].locate.offset.constant;
3569 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3570 #endif
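/* Worked example (editor's note, not in the original source): if
   ARGS_GROW_DOWNWARD and this argument sits at offset -8 with size 4, the
   code above yields upper_bound == 8 + 1 == 9 and lower_bound == 5, i.e.
   map bytes 5..8; with upward-growing args, offset 8 and size 4 give
   bytes 8..11.  */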
3572 i = lower_bound;
3573 /* Don't worry about things in the fixed argument area;
3574 it has already been saved. */
3575 if (i < reg_parm_stack_space)
3576 i = reg_parm_stack_space;
3577 while (i < upper_bound && stack_usage_map[i] == 0)
3578 i++;
3580 if (i < upper_bound)
3582 /* We need to make a save area. */
3583 unsigned int size
3584 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3585 enum machine_mode save_mode
3586 = mode_for_size (size, MODE_INT, 1);
3587 rtx adr
3588 = plus_constant (argblock,
3589 argvec[argnum].locate.offset.constant);
3590 rtx stack_area
3591 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3593 if (save_mode == BLKmode)
3595 argvec[argnum].save_area
3596 = assign_stack_temp (BLKmode,
3597 argvec[argnum].locate.size.constant,
3598 1);
3600 emit_block_move (validize_mem (argvec[argnum].save_area),
3601 stack_area,
3602 GEN_INT (argvec[argnum].locate.size.constant),
3603 BLOCK_OP_CALL_PARM);
3605 else
3607 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3609 emit_move_insn (argvec[argnum].save_area, stack_area);
3614 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3615 partial, reg, 0, argblock,
3616 GEN_INT (argvec[argnum].locate.offset.constant),
3617 reg_parm_stack_space,
3618 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3620 /* Now mark the segment we just used. */
3621 if (ACCUMULATE_OUTGOING_ARGS)
3622 for (i = lower_bound; i < upper_bound; i++)
3623 stack_usage_map[i] = 1;
3625 NO_DEFER_POP;
3627 if (flags & ECF_CONST)
3629 rtx use;
3631 /* Indicate argument access so that alias.c knows that these
3632 values are live. */
3633 if (argblock)
3634 use = plus_constant (argblock,
3635 argvec[argnum].locate.offset.constant);
3636 else
3637 /* When arguments are pushed, trying to tell alias.c where
3638 exactly this argument is won't work, because the
3639 auto-increment causes confusion. So we merely indicate
3640 that we access something with a known mode somewhere on
3641 the stack. */
3642 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3643 gen_rtx_SCRATCH (Pmode));
3644 use = gen_rtx_MEM (argvec[argnum].mode, use);
3645 use = gen_rtx_USE (VOIDmode, use);
3646 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
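/* Editorial illustration (not in the original source): for a pushed SImode
   argument of a const libcall, the entry recorded above looks roughly like

     (use (mem:SI (plus (reg virtual-outgoing-args) (scratch))))

   telling alias.c only that *some* SImode stack location is read, since the
   exact pushed address cannot be described.  */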
3651 /* If we pushed args in forward order, perform stack alignment
3652 after pushing the last arg. */
3653 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3654 anti_adjust_stack (GEN_INT (args_size.constant
3655 - original_args_size.constant));
3657 if (PUSH_ARGS_REVERSED)
3658 argnum = nargs - 1;
3659 else
3660 argnum = 0;
3662 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3664 /* Now load any reg parms into their regs. */
3666 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3667 are to be loaded into their registers. */
3668 for (count = 0; count < nargs; count++, argnum += inc)
3670 enum machine_mode mode = argvec[argnum].mode;
3671 rtx val = argvec[argnum].value;
3672 rtx reg = argvec[argnum].reg;
3673 int partial = argvec[argnum].partial;
3675 /* Handle calls that pass values in multiple non-contiguous
3676 locations. The PA64 has examples of this for library calls. */
3677 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3678 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3679 else if (reg != 0 && partial == 0)
3680 emit_move_insn (reg, val);
3682 NO_DEFER_POP;
3685 /* Any regs containing parms remain in use through the call. */
3686 for (count = 0; count < nargs; count++)
3688 rtx reg = argvec[count].reg;
3689 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3690 use_group_regs (&call_fusage, reg);
3691 else if (reg != 0)
3692 use_reg (&call_fusage, reg);
3695 /* Pass the function the address in which to return a structure value. */
3696 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3698 emit_move_insn (struct_value,
3699 force_reg (Pmode,
3700 force_operand (XEXP (mem_value, 0),
3701 NULL_RTX)));
3702 if (REG_P (struct_value))
3703 use_reg (&call_fusage, struct_value);
3706 /* Don't allow popping to be deferred, since then
3707 cse'ing of library calls could delete a call and leave the pop. */
3708 NO_DEFER_POP;
3709 valreg = (mem_value == 0 && outmode != VOIDmode
3710 ? hard_libcall_value (outmode) : NULL_RTX);
3712 /* Stack must be properly aligned now. */
3713 gcc_assert (!(stack_pointer_delta
3714 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3716 before_call = get_last_insn ();
3718 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3719 will set inhibit_defer_pop to that value. */
3720 /* The return type is needed to decide how many bytes the function pops.
3721 Signedness plays no role in that, so for simplicity, we pretend it's
3722 always signed. We also assume that the list of arguments passed has
3723 no impact, so we pretend it is unknown. */
3725 emit_call_1 (fun, NULL,
3726 get_identifier (XSTR (orgfun, 0)),
3727 build_function_type (tfom, NULL_TREE),
3728 original_args_size.constant, args_size.constant,
3729 struct_value_size,
3730 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3731 valreg,
3732 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3734 /* For functions that cannot return, inform flow
3735 that control does not fall through. */
3738 if (flags & ECF_NORETURN)
3740 /* The barrier note must be emitted
3741 immediately after the CALL_INSN. Some ports emit more than
3742 just a CALL_INSN above, so we must search for it here. */
3744 rtx last = get_last_insn ();
3745 while (!CALL_P (last))
3747 last = PREV_INSN (last);
3748 /* There was no CALL_INSN? */
3749 gcc_assert (last != before_call);
3752 emit_barrier_after (last);
3755 /* Now restore inhibit_defer_pop to its actual original value. */
3756 OK_DEFER_POP;
3758 /* If call is cse'able, make appropriate pair of reg-notes around it.
3759 Test valreg so we don't crash; may safely ignore `const'
3760 if return type is void. Disable for PARALLEL return values, because
3761 we have no way to move such values into a pseudo register. */
3762 if (flags & ECF_LIBCALL_BLOCK)
3764 rtx insns;
3766 if (valreg == 0)
3768 insns = get_insns ();
3769 end_sequence ();
3770 emit_insn (insns);
3772 else
3774 rtx note = 0;
3775 rtx temp;
3776 int i;
3778 if (GET_CODE (valreg) == PARALLEL)
3780 temp = gen_reg_rtx (outmode);
3781 emit_group_store (temp, valreg, NULL_TREE,
3782 GET_MODE_SIZE (outmode));
3783 valreg = temp;
3786 temp = gen_reg_rtx (GET_MODE (valreg));
3788 /* Construct an "equal form" for the value which mentions all the
3789 arguments in order as well as the function name. */
3790 for (i = 0; i < nargs; i++)
3791 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3792 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
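/* Editor's illustration (not in the original source): for nargs == 2 the
   list built above has the shape

     (expr_list FUN (expr_list ARG1 (expr_list ARG0 nil)))

   and emit_libcall_block below attaches it as a REG_EQUAL note, letting
   CSE prove that two identical libcalls compute the same value.  */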
3794 insns = get_insns ();
3795 end_sequence ();
3797 if (flags & ECF_PURE)
3798 note = gen_rtx_EXPR_LIST (VOIDmode,
3799 gen_rtx_USE (VOIDmode,
3800 gen_rtx_MEM (BLKmode,
3801 gen_rtx_SCRATCH (VOIDmode))),
3802 note);
3804 emit_libcall_block (insns, temp, valreg, note);
3806 valreg = temp;
3809 pop_temp_slots ();
3811 /* Copy the value to the right place. */
3812 if (outmode != VOIDmode && retval)
3814 if (mem_value)
3816 if (value == 0)
3817 value = mem_value;
3818 if (value != mem_value)
3819 emit_move_insn (value, mem_value);
3821 else if (GET_CODE (valreg) == PARALLEL)
3823 if (value == 0)
3824 value = gen_reg_rtx (outmode);
3825 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3827 else if (value != 0)
3828 emit_move_insn (value, valreg);
3829 else
3830 value = valreg;
3833 if (ACCUMULATE_OUTGOING_ARGS)
3835 #ifdef REG_PARM_STACK_SPACE
3836 if (save_area)
3837 restore_fixed_argument_area (save_area, argblock,
3838 high_to_save, low_to_save);
3839 #endif
3841 /* If we saved any argument areas, restore them. */
3842 for (count = 0; count < nargs; count++)
3843 if (argvec[count].save_area)
3845 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3846 rtx adr = plus_constant (argblock,
3847 argvec[count].locate.offset.constant);
3848 rtx stack_area = gen_rtx_MEM (save_mode,
3849 memory_address (save_mode, adr));
3851 if (save_mode == BLKmode)
3852 emit_block_move (stack_area,
3853 validize_mem (argvec[count].save_area),
3854 GEN_INT (argvec[count].locate.size.constant),
3855 BLOCK_OP_CALL_PARM);
3856 else
3857 emit_move_insn (stack_area, argvec[count].save_area);
3860 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3861 stack_usage_map = initial_stack_usage_map;
3864 if (stack_usage_map_buf)
3865 free (stack_usage_map_buf);
3867 return value;
3871 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3873 for a value of mode OUTMODE,
3874 with NARGS different arguments, passed as alternating rtx values
3875 and machine_modes to convert them to.
3877 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
3878 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
3879 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
3880 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
3881 REG_LIBCALL/REG_RETVAL notes with an extra (use (mem (scratch))),
3882 or other LCT_ value for other types of library calls. */
3884 void
3885 emit_library_call (rtx orgfun, enum libcall_type fn_type,
3886 enum machine_mode outmode, int nargs, ...)
3888 va_list p;
3890 va_start (p, nargs);
3891 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
3892 va_end (p);
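/* Editor's usage sketch (not in the original source; FUN, OP0 and OP1 are
   hypothetical): a caller passes NARGS (rtx, mode) pairs after the fixed
   arguments, e.g. for a two-operand helper with no useful return value:

     emit_library_call (fun, LCT_NORMAL, VOIDmode, 2,
                        op0, SImode, op1, SImode);  */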
3895 /* Like emit_library_call except that an extra argument, VALUE,
3896 comes second and says where to store the result.
3897 (If VALUE is zero, this function chooses a convenient way
3898 to return the value.)
3900 This function returns an rtx for where the value is to be found.
3901 If VALUE is nonzero, VALUE is returned. */
3903 rtx
3904 emit_library_call_value (rtx orgfun, rtx value,
3905 enum libcall_type fn_type,
3906 enum machine_mode outmode, int nargs, ...)
3908 rtx result;
3909 va_list p;
3911 va_start (p, nargs);
3912 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
3913 nargs, p);
3914 va_end (p);
3916 return result;
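/* Editor's usage sketch (not in the original source; FUN, TARGET and the
   operands are hypothetical): requesting an SImode result in TARGET:

     rtx res = emit_library_call_value (fun, target, LCT_CONST, SImode, 2,
                                        op0, SImode, op1, SImode);

   If TARGET is NULL_RTX a convenient location is chosen and returned;
   otherwise TARGET itself comes back as RES.  */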
3919 /* Store a single argument for a function call
3920 into the register or memory area where it must be passed.
3921 *ARG describes the argument value and where to pass it.
3923 ARGBLOCK is the address of the stack-block for all the arguments,
3924 or 0 on a machine where arguments are pushed individually.
3926 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3927 so must be careful about how the stack is used.
3929 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3930 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is in effect
3931 to indicate that we need not worry about saving and restoring the stack.
3933 FNDECL is the declaration of the function we are calling.
3935 Return nonzero if this arg should cause sibcall failure,
3936 zero otherwise. */
3938 static int
3939 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
3940 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
3942 tree pval = arg->tree_value;
3943 rtx reg = 0;
3944 int partial = 0;
3945 int used = 0;
3946 int i, lower_bound = 0, upper_bound = 0;
3947 int sibcall_failure = 0;
3949 if (TREE_CODE (pval) == ERROR_MARK)
3950 return 1;
3952 /* Push a new temporary level for any temporaries we make for
3953 this argument. */
3954 push_temp_slots ();
3956 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
3958 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3959 save any previous data at that location. */
3960 if (argblock && ! variable_size && arg->stack)
3962 #ifdef ARGS_GROW_DOWNWARD
3963 /* stack_slot is negative, but we want to index stack_usage_map
3964 with positive values. */
3965 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3966 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3967 else
3968 upper_bound = 0;
3970 lower_bound = upper_bound - arg->locate.size.constant;
3971 #else
3972 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3973 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3974 else
3975 lower_bound = 0;
3977 upper_bound = lower_bound + arg->locate.size.constant;
3978 #endif
3980 i = lower_bound;
3981 /* Don't worry about things in the fixed argument area;
3982 it has already been saved. */
3983 if (i < reg_parm_stack_space)
3984 i = reg_parm_stack_space;
3985 while (i < upper_bound && stack_usage_map[i] == 0)
3986 i++;
3988 if (i < upper_bound)
3990 /* We need to make a save area. */
3991 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
3992 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
3993 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
3994 rtx stack_area = gen_rtx_MEM (save_mode, adr);
3996 if (save_mode == BLKmode)
3998 tree ot = TREE_TYPE (arg->tree_value);
3999 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4000 | TYPE_QUAL_CONST));
4002 arg->save_area = assign_temp (nt, 0, 1, 1);
4003 preserve_temp_slots (arg->save_area);
4004 emit_block_move (validize_mem (arg->save_area), stack_area,
4005 expr_size (arg->tree_value),
4006 BLOCK_OP_CALL_PARM);
4008 else
4010 arg->save_area = gen_reg_rtx (save_mode);
4011 emit_move_insn (arg->save_area, stack_area);
4017 /* If this isn't going to be placed on both the stack and in registers,
4018 set up the register and number of words. */
4019 if (! arg->pass_on_stack)
4021 if (flags & ECF_SIBCALL)
4022 reg = arg->tail_call_reg;
4023 else
4024 reg = arg->reg;
4025 partial = arg->partial;
4028 /* Being passed entirely in a register. We shouldn't be called in
4029 this case. */
4030 gcc_assert (reg == 0 || partial != 0);
4032 /* If this arg needs special alignment, don't load the registers
4033 here. */
4034 if (arg->n_aligned_regs != 0)
4035 reg = 0;
4037 /* If this is being passed partially in a register, we can't evaluate
4038 it directly into its stack slot. Otherwise, we can. */
4039 if (arg->value == 0)
4041 /* stack_arg_under_construction is nonzero if a function argument is
4042 being evaluated directly into the outgoing argument list and
4043 expand_call must take special action to preserve the argument list
4044 if it is called recursively.
4046 For scalar function arguments stack_usage_map is sufficient to
4047 determine which stack slots must be saved and restored. Scalar
4048 arguments in general have pass_on_stack == 0.
4050 If this argument is initialized by a function which takes the
4051 address of the argument (a C++ constructor or a C function
4052 returning a BLKmode structure), then stack_usage_map is
4053 insufficient and expand_call must push the stack around the
4054 function call. Such arguments have pass_on_stack == 1.
4056 Note that it is always safe to set stack_arg_under_construction,
4057 but this generates suboptimal code if set when not needed. */
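/* Editorial illustration (not in the original source): the classic
   pass_on_stack case is an argument built by a function returning a
   BLKmode aggregate directly into its stack slot, e.g. f (g ()); if g
   itself calls anything, the partially built slot must survive, which is
   what bumping stack_arg_under_construction arranges.  */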
4059 if (arg->pass_on_stack)
4060 stack_arg_under_construction++;
4062 arg->value = expand_expr (pval,
4063 (partial
4064 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4065 ? NULL_RTX : arg->stack,
4066 VOIDmode, EXPAND_STACK_PARM);
4068 /* If we are promoting the object, or if for any other reason the
4069 mode doesn't agree, convert the mode now. */
4071 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4072 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4073 arg->value, arg->unsignedp);
4075 if (arg->pass_on_stack)
4076 stack_arg_under_construction--;
4079 /* Don't allow anything left on stack from computation
4080 of argument to alloca. */
4081 if (flags & ECF_MAY_BE_ALLOCA)
4082 do_pending_stack_adjust ();
4084 if (arg->value == arg->stack)
4085 /* If the value is already in the stack slot, we are done. */
4086 ;
4087 else if (arg->mode != BLKmode)
4089 int size;
4091 /* Argument is a scalar, not entirely passed in registers.
4092 (If part is passed in registers, arg->partial says how much
4093 and emit_push_insn will take care of putting it there.)
4095 Push it, and if its size is less than the
4096 amount of space allocated to it,
4097 also bump stack pointer by the additional space.
4098 Note that in C the default argument promotions
4099 will prevent such mismatches. */
4101 size = GET_MODE_SIZE (arg->mode);
4102 /* Compute how much space the push instruction will push.
4103 On many machines, pushing a byte will advance the stack
4104 pointer by a halfword. */
4105 #ifdef PUSH_ROUNDING
4106 size = PUSH_ROUNDING (size);
4107 #endif
4108 used = size;
4110 /* Compute how much space the argument should get:
4111 round up to a multiple of the alignment for arguments. */
4112 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4113 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4114 / (PARM_BOUNDARY / BITS_PER_UNIT))
4115 * (PARM_BOUNDARY / BITS_PER_UNIT));
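/* Worked example (editor's note, not in the original source): with
   PARM_BOUNDARY == 32 (4 bytes) and a 2-byte HImode argument, size == 2
   rounds up to used == 4, and emit_push_insn below is asked to emit the
   extra used - size == 2 bytes of padding.  */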
4117 /* This isn't already where we want it on the stack, so put it there.
4118 This can either be done with push or copy insns. */
4119 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4120 PARM_BOUNDARY, partial, reg, used - size, argblock,
4121 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4122 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4124 /* Unless this is a partially-in-register argument, the argument is now
4125 in the stack. */
4126 if (partial == 0)
4127 arg->value = arg->stack;
4129 else
4131 /* BLKmode, at least partly to be pushed. */
4133 unsigned int parm_align;
4134 int excess;
4135 rtx size_rtx;
4137 /* Pushing a nonscalar.
4138 If part is passed in registers, PARTIAL says how much
4139 and emit_push_insn will take care of putting it there. */
4141 /* Round its size up to a multiple
4142 of the allocation unit for arguments. */
4144 if (arg->locate.size.var != 0)
4146 excess = 0;
4147 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4149 else
4151 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4152 for BLKmode is careful to avoid it. */
4153 excess = (arg->locate.size.constant
4154 - int_size_in_bytes (TREE_TYPE (pval))
4155 + partial);
4156 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4157 NULL_RTX, TYPE_MODE (sizetype), 0);
4160 parm_align = arg->locate.boundary;
4162 /* When an argument is padded down, the block is aligned to
4163 PARM_BOUNDARY, but the actual argument isn't. */
4164 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4166 if (arg->locate.size.var)
4167 parm_align = BITS_PER_UNIT;
4168 else if (excess)
4170 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4171 parm_align = MIN (parm_align, excess_align);
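/* Editor's note (not in the original source): excess & -excess isolates
   the lowest set bit, so excess == 12 (binary 1100) gives 4 and hence
   excess_align == 32 bits; the padded-down data can only be assumed
   aligned to the largest power of two dividing the excess.  */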
4175 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4177 /* emit_push_insn might not work properly if arg->value and
4178 argblock + arg->locate.offset areas overlap. */
4179 rtx x = arg->value;
4180 int i = 0;
4182 if (XEXP (x, 0) == current_function_internal_arg_pointer
4183 || (GET_CODE (XEXP (x, 0)) == PLUS
4184 && XEXP (XEXP (x, 0), 0) ==
4185 current_function_internal_arg_pointer
4186 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4188 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4189 i = INTVAL (XEXP (XEXP (x, 0), 1));
4191 /* expand_call should ensure this. */
4192 gcc_assert (!arg->locate.offset.var
4193 && GET_CODE (size_rtx) == CONST_INT);
4195 if (arg->locate.offset.constant > i)
4197 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4198 sibcall_failure = 1;
4200 else if (arg->locate.offset.constant < i)
4202 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4203 sibcall_failure = 1;
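/* Editorial illustration (not in the original source): if the source value
   sits at arg-pointer offset 16 and the destination slot covers offsets
   8..23 (offset 8, size 16), then 8 < 16 < 8 + 16 and the push would read
   memory it has partly overwritten, so sibcall_failure is set.  */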
4208 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4209 parm_align, partial, reg, excess, argblock,
4210 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4211 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4213 /* Unless this is a partially-in-register argument, the argument is now
4214 in the stack.
4216 ??? Unlike the case above, in which we want the actual
4217 address of the data, so that we can load it directly into a
4218 register, here we want the address of the stack slot, so that
4219 it's properly aligned for word-by-word copying or something
4220 like that. It's not clear that this is always correct. */
4221 if (partial == 0)
4222 arg->value = arg->stack_slot;
4225 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4227 tree type = TREE_TYPE (arg->tree_value);
4228 arg->parallel_value
4229 = emit_group_load_into_temps (arg->reg, arg->value, type,
4230 int_size_in_bytes (type));
4233 /* Mark all slots this store used. */
4234 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4235 && argblock && ! variable_size && arg->stack)
4236 for (i = lower_bound; i < upper_bound; i++)
4237 stack_usage_map[i] = 1;
4239 /* Once we have pushed something, pops can't safely
4240 be deferred during the rest of the arguments. */
4241 NO_DEFER_POP;
4243 /* Free any temporary slots made in processing this argument. Show
4244 that we might have taken the address of something and pushed that
4245 as an operand. */
4246 preserve_temp_slots (NULL_RTX);
4247 free_temp_slots ();
4248 pop_temp_slots ();
4250 return sibcall_failure;
4253 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4255 bool
4256 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4257 tree type)
4259 if (!type)
4260 return false;
4262 /* If the type has variable size... */
4263 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4264 return true;
4266 /* If the type is marked as addressable (it is required
4267 to be constructed into the stack)... */
4268 if (TREE_ADDRESSABLE (type))
4269 return true;
4271 return false;
4274 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4275 takes trailing padding of a structure into account. */
4276 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4278 bool
4279 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
4281 if (!type)
4282 return false;
4284 /* If the type has variable size... */
4285 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4286 return true;
4288 /* If the type is marked as addressable (it is required
4289 to be constructed into the stack)... */
4290 if (TREE_ADDRESSABLE (type))
4291 return true;
4293 /* If the padding and mode of the type is such that a copy into
4294 a register would put it into the wrong part of the register. */
4295 if (mode == BLKmode
4296 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4297 && (FUNCTION_ARG_PADDING (mode, type)
4298 == (BYTES_BIG_ENDIAN ? upward : downward)))
4299 return true;
4301 return false;
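/* Worked example (editor's note, not in the original source): a 3-byte
   BLKmode struct with PARM_BOUNDARY == 32 on a big-endian target whose
   padding direction is upward satisfies all three clauses (3 % 4 != 0),
   so it must be passed on the stack rather than left in the wrong part
   of a register.  */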