1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "optabs.h"
32 #include "libfuncs.h"
33 #include "function.h"
34 #include "regs.h"
35 #include "toplev.h"
36 #include "output.h"
37 #include "tm_p.h"
38 #include "timevar.h"
39 #include "sbitmap.h"
40 #include "langhooks.h"
41 #include "target.h"
42 #include "cgraph.h"
43 #include "except.h"
45 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
46 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
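/* Illustrative note (not part of the original source): on a target where
   PREFERRED_STACK_BOUNDARY is 128 bits and BITS_PER_UNIT is 8, STACK_BYTES
   works out to 16, i.e. outgoing argument blocks are rounded up to 16-byte
   multiples.  The actual values are target-defined.  */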
48 /* Data structure and subroutines used within expand_call. */
50 struct arg_data
52 /* Tree node for this argument. */
53 tree tree_value;
54 /* Mode for value; TYPE_MODE unless promoted. */
55 enum machine_mode mode;
56 /* Current RTL value for argument, or 0 if it isn't precomputed. */
57 rtx value;
 58 /* Initially-computed RTL value for argument; only for const functions. */
59 rtx initial_value;
 60 /* Register to pass this argument in, 0 if passed on stack, or a
61 PARALLEL if the arg is to be copied into multiple non-contiguous
62 registers. */
63 rtx reg;
64 /* Register to pass this argument in when generating tail call sequence.
65 This is not the same register as for normal calls on machines with
66 register windows. */
67 rtx tail_call_reg;
68 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
69 form for emit_group_move. */
70 rtx parallel_value;
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
73 int unsignedp;
74 /* Number of registers to use. 0 means put the whole arg in registers.
75 Also 0 if not passed in registers. */
76 int partial;
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
81 int pass_on_stack;
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
86 rtx stack;
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
90 rtx stack_slot;
91 /* Place that this stack area has been saved, if needed. */
92 rtx save_area;
93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
97 rtx *aligned_regs;
98 int n_aligned_regs;
 101 /* A vector of one char per byte of stack space. A byte is nonzero if
102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
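/* Illustrative example (not part of the original source): if the first 16
   bytes of the outgoing argument area have been used, stack_usage_map[0]
   through stack_usage_map[15] are nonzero and highest_outgoing_arg_in_use
   is 16.  */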
 110 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
 111 stack location has already been overwritten with a tail call argument.
 112 This bitmap is used to prevent sibling call optimization if the function
 113 tries to use its parent's incoming argument slots when they have already
 114 been overwritten with tail call arguments. */
115 static sbitmap stored_args_map;
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 int stack_arg_under_construction;
124 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
125 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
126 CUMULATIVE_ARGS *);
127 static void precompute_register_parameters (int, struct arg_data *, int *);
128 static int store_one_arg (struct arg_data *, rtx, int, int, int);
129 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
130 static int finalize_must_preallocate (int, int, struct arg_data *,
131 struct args_size *);
132 static void precompute_arguments (int, int, struct arg_data *);
133 static int compute_argument_block_size (int, struct args_size *, int);
134 static void initialize_argument_information (int, struct arg_data *,
135 struct args_size *, int, tree,
136 tree, CUMULATIVE_ARGS *, int,
137 rtx *, int *, int *, int *,
138 bool *, bool);
139 static void compute_argument_addresses (struct arg_data *, rtx, int);
140 static rtx rtx_for_function_call (tree, tree);
141 static void load_register_parameters (struct arg_data *, int, rtx *, int,
142 int, int *);
143 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
144 enum machine_mode, int, va_list);
145 static int special_function_p (tree, int);
146 static int check_sibcall_argument_overlap_1 (rtx);
147 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
149 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
150 unsigned int);
151 static tree split_complex_values (tree);
152 static tree split_complex_types (tree);
154 #ifdef REG_PARM_STACK_SPACE
155 static rtx save_fixed_argument_area (int, rtx, int *, int *);
156 static void restore_fixed_argument_area (rtx, rtx, int, int);
157 #endif
159 /* Force FUNEXP into a form suitable for the address of a CALL,
160 and return that as an rtx. Also load the static chain register
161 if FNDECL is a nested function.
163 CALL_FUSAGE points to a variable holding the prospective
164 CALL_INSN_FUNCTION_USAGE information. */
 166 rtx
 167 prepare_call_address (rtx funexp, rtx static_chain_value,
168 rtx *call_fusage, int reg_parm_seen, int sibcallp)
170 /* Make a valid memory address and copy constants through pseudo-regs,
171 but not for a constant address if -fno-function-cse. */
172 if (GET_CODE (funexp) != SYMBOL_REF)
173 /* If we are using registers for parameters, force the
174 function address into a register now. */
175 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
176 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
177 : memory_address (FUNCTION_MODE, funexp));
178 else if (! sibcallp)
180 #ifndef NO_FUNCTION_CSE
181 if (optimize && ! flag_no_function_cse)
182 funexp = force_reg (Pmode, funexp);
183 #endif
186 if (static_chain_value != 0)
188 static_chain_value = convert_memory_address (Pmode, static_chain_value);
189 emit_move_insn (static_chain_rtx, static_chain_value);
191 if (REG_P (static_chain_rtx))
192 use_reg (call_fusage, static_chain_rtx);
195 return funexp;
198 /* Generate instructions to call function FUNEXP,
199 and optionally pop the results.
200 The CALL_INSN is the first insn generated.
202 FNDECL is the declaration node of the function. This is given to the
203 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
205 FUNTYPE is the data type of the function. This is given to the macro
206 RETURN_POPS_ARGS to determine whether this function pops its own args.
207 We used to allow an identifier for library functions, but that doesn't
208 work when the return type is an aggregate type and the calling convention
209 says that the pointer to this aggregate is to be popped by the callee.
211 STACK_SIZE is the number of bytes of arguments on the stack,
212 ROUNDED_STACK_SIZE is that number rounded up to
213 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
214 both to put into the call insn and to generate explicit popping
215 code if necessary.
217 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
218 It is zero if this call doesn't want a structure value.
220 NEXT_ARG_REG is the rtx that results from executing
221 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
222 just after all the args have had their registers assigned.
223 This could be whatever you like, but normally it is the first
224 arg-register beyond those used for args in this call,
225 or 0 if all the arg-registers are used in this call.
226 It is passed on to `gen_call' so you can put this info in the call insn.
228 VALREG is a hard register in which a value is returned,
229 or 0 if the call does not return a value.
231 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
232 the args to this call were processed.
233 We restore `inhibit_defer_pop' to that value.
235 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
236 denote registers used by the called function. */
238 static void
239 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
240 tree funtype ATTRIBUTE_UNUSED,
241 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT rounded_stack_size,
243 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
244 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
245 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
246 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
248 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
249 rtx call_insn;
250 int already_popped = 0;
251 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
252 #if defined (HAVE_call) && defined (HAVE_call_value)
253 rtx struct_value_size_rtx;
254 struct_value_size_rtx = GEN_INT (struct_value_size);
255 #endif
257 #ifdef CALL_POPS_ARGS
258 n_popped += CALL_POPS_ARGS (* args_so_far);
259 #endif
261 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
262 and we don't want to load it into a register as an optimization,
263 because prepare_call_address already did it if it should be done. */
264 if (GET_CODE (funexp) != SYMBOL_REF)
265 funexp = memory_address (FUNCTION_MODE, funexp);
267 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
268 if ((ecf_flags & ECF_SIBCALL)
269 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
270 && (n_popped > 0 || stack_size == 0))
272 rtx n_pop = GEN_INT (n_popped);
273 rtx pat;
275 /* If this subroutine pops its own args, record that in the call insn
276 if possible, for the sake of frame pointer elimination. */
278 if (valreg)
279 pat = GEN_SIBCALL_VALUE_POP (valreg,
280 gen_rtx_MEM (FUNCTION_MODE, funexp),
281 rounded_stack_size_rtx, next_arg_reg,
282 n_pop);
283 else
284 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
285 rounded_stack_size_rtx, next_arg_reg, n_pop);
287 emit_call_insn (pat);
288 already_popped = 1;
290 else
291 #endif
293 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
294 /* If the target has "call" or "call_value" insns, then prefer them
295 if no arguments are actually popped. If the target does not have
296 "call" or "call_value" insns, then we must use the popping versions
297 even if the call has no arguments to pop. */
298 #if defined (HAVE_call) && defined (HAVE_call_value)
299 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
300 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
301 #else
302 if (HAVE_call_pop && HAVE_call_value_pop)
303 #endif
305 rtx n_pop = GEN_INT (n_popped);
306 rtx pat;
308 /* If this subroutine pops its own args, record that in the call insn
309 if possible, for the sake of frame pointer elimination. */
311 if (valreg)
312 pat = GEN_CALL_VALUE_POP (valreg,
313 gen_rtx_MEM (FUNCTION_MODE, funexp),
314 rounded_stack_size_rtx, next_arg_reg, n_pop);
315 else
316 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
317 rounded_stack_size_rtx, next_arg_reg, n_pop);
319 emit_call_insn (pat);
320 already_popped = 1;
322 else
323 #endif
325 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
326 if ((ecf_flags & ECF_SIBCALL)
327 && HAVE_sibcall && HAVE_sibcall_value)
329 if (valreg)
330 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
331 gen_rtx_MEM (FUNCTION_MODE, funexp),
332 rounded_stack_size_rtx,
333 next_arg_reg, NULL_RTX));
334 else
335 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
336 rounded_stack_size_rtx, next_arg_reg,
337 struct_value_size_rtx));
339 else
340 #endif
342 #if defined (HAVE_call) && defined (HAVE_call_value)
343 if (HAVE_call && HAVE_call_value)
345 if (valreg)
346 emit_call_insn (GEN_CALL_VALUE (valreg,
347 gen_rtx_MEM (FUNCTION_MODE, funexp),
348 rounded_stack_size_rtx, next_arg_reg,
349 NULL_RTX));
350 else
351 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
352 rounded_stack_size_rtx, next_arg_reg,
353 struct_value_size_rtx));
355 else
356 #endif
357 gcc_unreachable ();
359 /* Find the call we just emitted. */
360 call_insn = last_call_insn ();
362 /* Mark memory as used for "pure" function call. */
363 if (ecf_flags & ECF_PURE)
364 call_fusage
365 = gen_rtx_EXPR_LIST
366 (VOIDmode,
367 gen_rtx_USE (VOIDmode,
368 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
369 call_fusage);
371 /* Put the register usage information there. */
372 add_function_usage_to (call_insn, call_fusage);
374 /* If this is a const call, then set the insn's unchanging bit. */
375 if (ecf_flags & (ECF_CONST | ECF_PURE))
376 CONST_OR_PURE_CALL_P (call_insn) = 1;
378 /* If this call can't throw, attach a REG_EH_REGION reg note to that
379 effect. */
380 if (ecf_flags & ECF_NOTHROW)
381 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
382 REG_NOTES (call_insn));
383 else
385 int rn = lookup_stmt_eh_region (fntree);
387 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
388 throw, which we already took care of. */
389 if (rn > 0)
390 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
391 REG_NOTES (call_insn));
392 note_current_region_may_contain_throw ();
395 if (ecf_flags & ECF_NORETURN)
396 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
397 REG_NOTES (call_insn));
398 if (ecf_flags & ECF_ALWAYS_RETURN)
399 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
400 REG_NOTES (call_insn));
402 if (ecf_flags & ECF_RETURNS_TWICE)
404 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
405 REG_NOTES (call_insn));
406 current_function_calls_setjmp = 1;
409 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
411 /* Restore this now, so that we do defer pops for this call's args
412 if the context of the call as a whole permits. */
413 inhibit_defer_pop = old_inhibit_defer_pop;
415 if (n_popped > 0)
417 if (!already_popped)
418 CALL_INSN_FUNCTION_USAGE (call_insn)
419 = gen_rtx_EXPR_LIST (VOIDmode,
420 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
421 CALL_INSN_FUNCTION_USAGE (call_insn));
422 rounded_stack_size -= n_popped;
423 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
424 stack_pointer_delta -= n_popped;
427 if (!ACCUMULATE_OUTGOING_ARGS)
429 /* If returning from the subroutine does not automatically pop the args,
430 we need an instruction to pop them sooner or later.
431 Perhaps do it now; perhaps just record how much space to pop later.
433 If returning from the subroutine does pop the args, indicate that the
434 stack pointer will be changed. */
436 if (rounded_stack_size != 0)
438 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
439 /* Just pretend we did the pop. */
440 stack_pointer_delta -= rounded_stack_size;
441 else if (flag_defer_pop && inhibit_defer_pop == 0
442 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
443 pending_stack_adjust += rounded_stack_size;
444 else
445 adjust_stack (rounded_stack_size_rtx);
448 /* When we accumulate outgoing args, we must avoid any stack manipulations.
449 Restore the stack pointer to its original value now. Usually
450 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
451 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
452 popping variants of functions exist as well.
 454 ??? We may optimize similarly to defer_pop above, but it is
455 probably not worthwhile.
457 ??? It will be worthwhile to enable combine_stack_adjustments even for
458 such machines. */
459 else if (n_popped)
460 anti_adjust_stack (GEN_INT (n_popped));
463 /* Determine if the function identified by NAME and FNDECL is one with
464 special properties we wish to know about.
466 For example, if the function might return more than one time (setjmp), then
467 set RETURNS_TWICE to a nonzero value.
 469 Similarly set NORETURN if the function is in the longjmp family.
471 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
472 space from the stack such as alloca. */
474 static int
475 special_function_p (tree fndecl, int flags)
477 if (fndecl && DECL_NAME (fndecl)
478 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
479 /* Exclude functions not at the file scope, or not `extern',
480 since they are not the magic functions we would otherwise
481 think they are.
482 FIXME: this should be handled with attributes, not with this
483 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
484 because you can declare fork() inside a function if you
485 wish. */
486 && (DECL_CONTEXT (fndecl) == NULL_TREE
487 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
488 && TREE_PUBLIC (fndecl))
490 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
491 const char *tname = name;
493 /* We assume that alloca will always be called by name. It
494 makes no sense to pass it as a pointer-to-function to
495 anything that does not understand its behavior. */
496 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
497 && name[0] == 'a'
498 && ! strcmp (name, "alloca"))
499 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
500 && name[0] == '_'
501 && ! strcmp (name, "__builtin_alloca"))))
502 flags |= ECF_MAY_BE_ALLOCA;
504 /* Disregard prefix _, __ or __x. */
505 if (name[0] == '_')
507 if (name[1] == '_' && name[2] == 'x')
508 tname += 3;
509 else if (name[1] == '_')
510 tname += 2;
511 else
512 tname += 1;
515 if (tname[0] == 's')
517 if ((tname[1] == 'e'
518 && (! strcmp (tname, "setjmp")
519 || ! strcmp (tname, "setjmp_syscall")))
520 || (tname[1] == 'i'
521 && ! strcmp (tname, "sigsetjmp"))
522 || (tname[1] == 'a'
523 && ! strcmp (tname, "savectx")))
524 flags |= ECF_RETURNS_TWICE;
526 if (tname[1] == 'i'
527 && ! strcmp (tname, "siglongjmp"))
528 flags |= ECF_NORETURN;
530 else if ((tname[0] == 'q' && tname[1] == 's'
531 && ! strcmp (tname, "qsetjmp"))
532 || (tname[0] == 'v' && tname[1] == 'f'
533 && ! strcmp (tname, "vfork"))
534 || (tname[0] == 'g' && tname[1] == 'e'
535 && !strcmp (tname, "getcontext")))
536 flags |= ECF_RETURNS_TWICE;
538 else if (tname[0] == 'l' && tname[1] == 'o'
539 && ! strcmp (tname, "longjmp"))
540 flags |= ECF_NORETURN;
543 return flags;
 546 /* Return nonzero when FNDECL represents a call to setjmp. */
 548 int
 549 setjmp_call_p (tree fndecl)
551 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
 554 /* Return true when EXP contains an alloca call. */
555 bool
556 alloca_call_p (tree exp)
558 if (TREE_CODE (exp) == CALL_EXPR
559 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
560 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
561 == FUNCTION_DECL)
562 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
563 0) & ECF_MAY_BE_ALLOCA))
564 return true;
565 return false;
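/* Illustrative example (not part of the original source): for the GENERIC
   tree of a call such as alloca (n), TREE_OPERAND (exp, 0) is an ADDR_EXPR
   whose operand is the alloca FUNCTION_DECL, so special_function_p reports
   ECF_MAY_BE_ALLOCA and alloca_call_p returns true.  */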
568 /* Detect flags (function attributes) from the function decl or type node. */
 570 int
 571 flags_from_decl_or_type (tree exp)
573 int flags = 0;
574 tree type = exp;
576 if (DECL_P (exp))
578 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
579 type = TREE_TYPE (exp);
581 if (i)
583 if (i->pure_function)
584 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
585 if (i->const_function)
586 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
589 /* The function exp may have the `malloc' attribute. */
590 if (DECL_IS_MALLOC (exp))
591 flags |= ECF_MALLOC;
593 /* The function exp may have the `pure' attribute. */
594 if (DECL_IS_PURE (exp))
595 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
597 if (TREE_NOTHROW (exp))
598 flags |= ECF_NOTHROW;
600 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
601 flags |= ECF_LIBCALL_BLOCK | ECF_CONST;
603 flags = special_function_p (exp, flags);
605 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
606 flags |= ECF_CONST;
608 if (TREE_THIS_VOLATILE (exp))
609 flags |= ECF_NORETURN;
611 /* Mark if the function returns with the stack pointer depressed. We
612 cannot consider it pure or constant in that case. */
613 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
615 flags |= ECF_SP_DEPRESSED;
616 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
619 return flags;
622 /* Detect flags from a CALL_EXPR. */
 624 int
 625 call_expr_flags (tree t)
627 int flags;
628 tree decl = get_callee_fndecl (t);
630 if (decl)
631 flags = flags_from_decl_or_type (decl);
632 else
634 t = TREE_TYPE (TREE_OPERAND (t, 0));
635 if (t && TREE_CODE (t) == POINTER_TYPE)
636 flags = flags_from_decl_or_type (TREE_TYPE (t));
637 else
638 flags = 0;
641 return flags;
644 /* Precompute all register parameters as described by ARGS, storing values
645 into fields within the ARGS array.
 647 NUM_ACTUALS indicates the total number of elements in the ARGS array.
649 Set REG_PARM_SEEN if we encounter a register parameter. */
651 static void
652 precompute_register_parameters (int num_actuals, struct arg_data *args,
653 int *reg_parm_seen)
655 int i;
657 *reg_parm_seen = 0;
659 for (i = 0; i < num_actuals; i++)
660 if (args[i].reg != 0 && ! args[i].pass_on_stack)
662 *reg_parm_seen = 1;
664 if (args[i].value == 0)
666 push_temp_slots ();
667 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
668 VOIDmode, 0);
669 preserve_temp_slots (args[i].value);
670 pop_temp_slots ();
673 /* If the value is a non-legitimate constant, force it into a
674 pseudo now. TLS symbols sometimes need a call to resolve. */
675 if (CONSTANT_P (args[i].value)
676 && !LEGITIMATE_CONSTANT_P (args[i].value))
677 args[i].value = force_reg (args[i].mode, args[i].value);
679 /* If we are to promote the function arg to a wider mode,
680 do it now. */
682 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
683 args[i].value
684 = convert_modes (args[i].mode,
685 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
686 args[i].value, args[i].unsignedp);
688 /* If we're going to have to load the value by parts, pull the
689 parts into pseudos. The part extraction process can involve
690 non-trivial computation. */
691 if (GET_CODE (args[i].reg) == PARALLEL)
693 tree type = TREE_TYPE (args[i].tree_value);
694 args[i].parallel_value
695 = emit_group_load_into_temps (args[i].reg, args[i].value,
696 type, int_size_in_bytes (type));
699 /* If the value is expensive, and we are inside an appropriately
700 short loop, put the value into a pseudo and then put the pseudo
701 into the hard reg.
703 For small register classes, also do this if this call uses
704 register parameters. This is to avoid reload conflicts while
705 loading the parameters registers. */
707 else if ((! (REG_P (args[i].value)
708 || (GET_CODE (args[i].value) == SUBREG
709 && REG_P (SUBREG_REG (args[i].value)))))
710 && args[i].mode != BLKmode
711 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
712 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
713 || optimize))
714 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
718 #ifdef REG_PARM_STACK_SPACE
720 /* The argument list is the property of the called routine and it
721 may clobber it. If the fixed area has been used for previous
722 parameters, we must save and restore it. */
724 static rtx
725 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
727 int low;
728 int high;
730 /* Compute the boundary of the area that needs to be saved, if any. */
731 high = reg_parm_stack_space;
732 #ifdef ARGS_GROW_DOWNWARD
733 high += 1;
734 #endif
735 if (high > highest_outgoing_arg_in_use)
736 high = highest_outgoing_arg_in_use;
738 for (low = 0; low < high; low++)
739 if (stack_usage_map[low] != 0)
741 int num_to_save;
742 enum machine_mode save_mode;
743 int delta;
744 rtx stack_area;
745 rtx save_area;
747 while (stack_usage_map[--high] == 0)
750 *low_to_save = low;
751 *high_to_save = high;
753 num_to_save = high - low + 1;
754 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
756 /* If we don't have the required alignment, must do this
757 in BLKmode. */
758 if ((low & (MIN (GET_MODE_SIZE (save_mode),
759 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
760 save_mode = BLKmode;
762 #ifdef ARGS_GROW_DOWNWARD
763 delta = -high;
764 #else
765 delta = low;
766 #endif
767 stack_area = gen_rtx_MEM (save_mode,
768 memory_address (save_mode,
769 plus_constant (argblock,
770 delta)));
772 set_mem_align (stack_area, PARM_BOUNDARY);
773 if (save_mode == BLKmode)
775 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
776 emit_block_move (validize_mem (save_area), stack_area,
777 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
779 else
781 save_area = gen_reg_rtx (save_mode);
782 emit_move_insn (save_area, stack_area);
785 return save_area;
788 return NULL_RTX;
791 static void
792 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
794 enum machine_mode save_mode = GET_MODE (save_area);
795 int delta;
796 rtx stack_area;
798 #ifdef ARGS_GROW_DOWNWARD
799 delta = -high_to_save;
800 #else
801 delta = low_to_save;
802 #endif
803 stack_area = gen_rtx_MEM (save_mode,
804 memory_address (save_mode,
805 plus_constant (argblock, delta)));
806 set_mem_align (stack_area, PARM_BOUNDARY);
808 if (save_mode != BLKmode)
809 emit_move_insn (stack_area, save_area);
810 else
811 emit_block_move (stack_area, validize_mem (save_area),
812 GEN_INT (high_to_save - low_to_save + 1),
813 BLOCK_OP_CALL_PARM);
815 #endif /* REG_PARM_STACK_SPACE */
 817 /* For any elements in ARGS that refer to parameters that are to be passed in
 818 registers, but not in memory, and whose alignment does not permit a
 819 direct copy into registers, copy the values into a group of pseudos
 820 which we will later copy into the appropriate hard registers.
822 Pseudos for each unaligned argument will be stored into the array
823 args[argnum].aligned_regs. The caller is responsible for deallocating
824 the aligned_regs array if it is nonzero. */
826 static void
827 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
829 int i, j;
831 for (i = 0; i < num_actuals; i++)
832 if (args[i].reg != 0 && ! args[i].pass_on_stack
833 && args[i].mode == BLKmode
834 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
835 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
837 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
838 int endian_correction = 0;
840 if (args[i].partial)
842 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
843 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
845 else
847 args[i].n_aligned_regs
848 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
851 args[i].aligned_regs = xmalloc (sizeof (rtx) * args[i].n_aligned_regs);
853 /* Structures smaller than a word are normally aligned to the
854 least significant byte. On a BYTES_BIG_ENDIAN machine,
855 this means we must skip the empty high order bytes when
856 calculating the bit offset. */
857 if (bytes < UNITS_PER_WORD
858 #ifdef BLOCK_REG_PADDING
859 && (BLOCK_REG_PADDING (args[i].mode,
860 TREE_TYPE (args[i].tree_value), 1)
861 == downward)
862 #else
863 && BYTES_BIG_ENDIAN
864 #endif
866 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
868 for (j = 0; j < args[i].n_aligned_regs; j++)
870 rtx reg = gen_reg_rtx (word_mode);
871 rtx word = operand_subword_force (args[i].value, j, BLKmode);
872 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
874 args[i].aligned_regs[j] = reg;
875 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
876 word_mode, word_mode);
878 /* There is no need to restrict this code to loading items
879 in TYPE_ALIGN sized hunks. The bitfield instructions can
880 load up entire word sized registers efficiently.
882 ??? This may not be needed anymore.
 883 We used to emit a clobber here but that doesn't let later
884 passes optimize the instructions we emit. By storing 0 into
885 the register later passes know the first AND to zero out the
886 bitfield being set in the register is unnecessary. The store
887 of 0 will be deleted as will at least the first AND. */
889 emit_move_insn (reg, const0_rtx);
891 bytes -= bitsize / BITS_PER_UNIT;
892 store_bit_field (reg, bitsize, endian_correction, word_mode,
893 word);
898 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
899 ACTPARMS.
901 NUM_ACTUALS is the total number of parameters.
903 N_NAMED_ARGS is the total number of named arguments.
 905 FNDECL is the tree node for the target of this call (if known).
907 ARGS_SO_FAR holds state needed by the target to know where to place
908 the next argument.
910 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
911 for arguments which are passed in registers.
 913 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
914 and may be modified by this routine.
916 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
 917 flags which may be modified by this routine.
919 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
920 that requires allocation of stack space.
922 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
923 the thunked-to function. */
925 static void
926 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
927 struct arg_data *args,
928 struct args_size *args_size,
929 int n_named_args ATTRIBUTE_UNUSED,
930 tree actparms, tree fndecl,
931 CUMULATIVE_ARGS *args_so_far,
932 int reg_parm_stack_space,
933 rtx *old_stack_level, int *old_pending_adj,
934 int *must_preallocate, int *ecf_flags,
935 bool *may_tailcall, bool call_from_thunk_p)
937 /* 1 if scanning parms front to back, -1 if scanning back to front. */
938 int inc;
940 /* Count arg position in order args appear. */
941 int argpos;
943 int i;
944 tree p;
946 args_size->constant = 0;
947 args_size->var = 0;
949 /* In this loop, we consider args in the order they are written.
950 We fill up ARGS from the front or from the back if necessary
951 so that in any case the first arg to be pushed ends up at the front. */
953 if (PUSH_ARGS_REVERSED)
955 i = num_actuals - 1, inc = -1;
956 /* In this case, must reverse order of args
957 so that we compute and push the last arg first. */
959 else
961 i = 0, inc = 1;
964 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
965 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
967 tree type = TREE_TYPE (TREE_VALUE (p));
968 int unsignedp;
969 enum machine_mode mode;
971 args[i].tree_value = TREE_VALUE (p);
973 /* Replace erroneous argument with constant zero. */
974 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
975 args[i].tree_value = integer_zero_node, type = integer_type_node;
977 /* If TYPE is a transparent union, pass things the way we would
978 pass the first field of the union. We have already verified that
979 the modes are the same. */
980 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
981 type = TREE_TYPE (TYPE_FIELDS (type));
983 /* Decide where to pass this arg.
985 args[i].reg is nonzero if all or part is passed in registers.
987 args[i].partial is nonzero if part but not all is passed in registers,
988 and the exact value says how many bytes are passed in registers.
990 args[i].pass_on_stack is nonzero if the argument must at least be
991 computed on the stack. It may then be loaded back into registers
992 if args[i].reg is nonzero.
994 These decisions are driven by the FUNCTION_... macros and must agree
995 with those made by function.c. */
997 /* See if this argument should be passed by invisible reference. */
998 if (pass_by_reference (args_so_far, TYPE_MODE (type),
999 type, argpos < n_named_args))
1001 bool callee_copies;
1002 tree base;
1004 callee_copies
1005 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1006 type, argpos < n_named_args);
1008 /* If we're compiling a thunk, pass through invisible references
1009 instead of making a copy. */
1010 if (call_from_thunk_p
1011 || (callee_copies
1012 && !TREE_ADDRESSABLE (type)
1013 && (base = get_base_address (args[i].tree_value))
1014 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1016 /* We can't use sibcalls if a callee-copied argument is
1017 stored in the current function's frame. */
1018 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1019 *may_tailcall = false;
1021 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1022 type = TREE_TYPE (args[i].tree_value);
1024 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1026 else
1028 /* We make a copy of the object and pass the address to the
1029 function being called. */
1030 rtx copy;
1032 if (!COMPLETE_TYPE_P (type)
1033 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1034 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1035 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1036 STACK_CHECK_MAX_VAR_SIZE))))
1038 /* This is a variable-sized object. Make space on the stack
1039 for it. */
1040 rtx size_rtx = expr_size (TREE_VALUE (p));
1042 if (*old_stack_level == 0)
1044 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1045 *old_pending_adj = pending_stack_adjust;
1046 pending_stack_adjust = 0;
1049 copy = gen_rtx_MEM (BLKmode,
1050 allocate_dynamic_stack_space
1051 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1052 set_mem_attributes (copy, type, 1);
1054 else
1055 copy = assign_temp (type, 0, 1, 0);
1057 store_expr (args[i].tree_value, copy, 0);
1059 if (callee_copies)
1060 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1061 else
1062 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1064 args[i].tree_value
1065 = build_fold_addr_expr (make_tree (type, copy));
1066 type = TREE_TYPE (args[i].tree_value);
1067 *may_tailcall = false;
1071 mode = TYPE_MODE (type);
1072 unsignedp = TYPE_UNSIGNED (type);
1074 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1075 mode = promote_mode (type, mode, &unsignedp, 1);
1077 args[i].unsignedp = unsignedp;
1078 args[i].mode = mode;
1080 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1081 argpos < n_named_args);
1082 #ifdef FUNCTION_INCOMING_ARG
1083 /* If this is a sibling call and the machine has register windows, the
 1084 register window has to be unwound before calling the routine, so
1085 arguments have to go into the incoming registers. */
1086 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1087 argpos < n_named_args);
1088 #else
1089 args[i].tail_call_reg = args[i].reg;
1090 #endif
1092 if (args[i].reg)
1093 args[i].partial
1094 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1095 argpos < n_named_args);
1097 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1099 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1100 it means that we are to pass this arg in the register(s) designated
1101 by the PARALLEL, but also to pass it in the stack. */
1102 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1103 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1104 args[i].pass_on_stack = 1;
1106 /* If this is an addressable type, we must preallocate the stack
1107 since we must evaluate the object into its final location.
1109 If this is to be passed in both registers and the stack, it is simpler
1110 to preallocate. */
1111 if (TREE_ADDRESSABLE (type)
1112 || (args[i].pass_on_stack && args[i].reg != 0))
1113 *must_preallocate = 1;
1115 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1116 we cannot consider this function call constant. */
1117 if (TREE_ADDRESSABLE (type))
1118 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1120 /* Compute the stack-size of this argument. */
1121 if (args[i].reg == 0 || args[i].partial != 0
1122 || reg_parm_stack_space > 0
1123 || args[i].pass_on_stack)
1124 locate_and_pad_parm (mode, type,
1125 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1127 #else
1128 args[i].reg != 0,
1129 #endif
1130 args[i].pass_on_stack ? 0 : args[i].partial,
1131 fndecl, args_size, &args[i].locate);
1132 #ifdef BLOCK_REG_PADDING
1133 else
1134 /* The argument is passed entirely in registers. See at which
1135 end it should be padded. */
1136 args[i].locate.where_pad =
1137 BLOCK_REG_PADDING (mode, type,
1138 int_size_in_bytes (type) <= UNITS_PER_WORD);
1139 #endif
1141 /* Update ARGS_SIZE, the total stack space for args so far. */
1143 args_size->constant += args[i].locate.size.constant;
1144 if (args[i].locate.size.var)
1145 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1147 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1148 have been used, etc. */
1150 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1151 argpos < n_named_args);
1155 /* Update ARGS_SIZE to contain the total size for the argument block.
1156 Return the original constant component of the argument block's size.
1158 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1159 for arguments passed in registers. */
1161 static int
1162 compute_argument_block_size (int reg_parm_stack_space,
1163 struct args_size *args_size,
1164 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1166 int unadjusted_args_size = args_size->constant;
1168 /* For accumulate outgoing args mode we don't need to align, since the frame
1169 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1170 backends from generating misaligned frame sizes. */
1171 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1172 preferred_stack_boundary = STACK_BOUNDARY;
1174 /* Compute the actual size of the argument block required. The variable
1175 and constant sizes must be combined, the size may have to be rounded,
1176 and there may be a minimum required size. */
1178 if (args_size->var)
1180 args_size->var = ARGS_SIZE_TREE (*args_size);
1181 args_size->constant = 0;
1183 preferred_stack_boundary /= BITS_PER_UNIT;
1184 if (preferred_stack_boundary > 1)
1186 /* We don't handle this case yet. To handle it correctly we have
1187 to add the delta, round and subtract the delta.
1188 Currently no machine description requires this support. */
1189 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1190 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1193 if (reg_parm_stack_space > 0)
1195 args_size->var
1196 = size_binop (MAX_EXPR, args_size->var,
1197 ssize_int (reg_parm_stack_space));
1199 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1200 /* The area corresponding to register parameters is not to count in
1201 the size of the block we need. So make the adjustment. */
1202 args_size->var
1203 = size_binop (MINUS_EXPR, args_size->var,
1204 ssize_int (reg_parm_stack_space));
1205 #endif
1208 else
1210 preferred_stack_boundary /= BITS_PER_UNIT;
1211 if (preferred_stack_boundary < 1)
1212 preferred_stack_boundary = 1;
1213 args_size->constant = (((args_size->constant
1214 + stack_pointer_delta
1215 + preferred_stack_boundary - 1)
1216 / preferred_stack_boundary
1217 * preferred_stack_boundary)
1218 - stack_pointer_delta);
1220 args_size->constant = MAX (args_size->constant,
1221 reg_parm_stack_space);
1223 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1224 args_size->constant -= reg_parm_stack_space;
1225 #endif
1227 return unadjusted_args_size;
1230 /* Precompute parameters as needed for a function call.
1232 FLAGS is mask of ECF_* constants.
1234 NUM_ACTUALS is the number of arguments.
1236 ARGS is an array containing information for each argument; this
1237 routine fills in the INITIAL_VALUE and VALUE fields for each
1238 precomputed argument. */
1240 static void
1241 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1243 int i;
1245 /* If this is a libcall, then precompute all arguments so that we do not
1246 get extraneous instructions emitted as part of the libcall sequence. */
1247 if ((flags & ECF_LIBCALL_BLOCK) == 0)
1248 return;
1250 for (i = 0; i < num_actuals; i++)
1252 enum machine_mode mode;
1254 /* If this is an addressable type, we cannot pre-evaluate it. */
1255 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1257 args[i].initial_value = args[i].value
1258 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1260 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1261 if (mode != args[i].mode)
1263 args[i].value
1264 = convert_modes (args[i].mode, mode,
1265 args[i].value, args[i].unsignedp);
1266 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1267 /* CSE will replace this only if it contains args[i].value
1268 pseudo, so convert it down to the declared mode using
1269 a SUBREG. */
1270 if (REG_P (args[i].value)
1271 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1273 args[i].initial_value
1274 = gen_lowpart_SUBREG (mode, args[i].value);
1275 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1276 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1277 args[i].unsignedp);
1279 #endif
1284 /* Given the current state of MUST_PREALLOCATE and information about
1285 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1286 compute and return the final value for MUST_PREALLOCATE. */
1288 static int
1289 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1291 /* See if we have or want to preallocate stack space.
1293 If we would have to push a partially-in-regs parm
1294 before other stack parms, preallocate stack space instead.
1296 If the size of some parm is not a multiple of the required stack
1297 alignment, we must preallocate.
1299 If the total size of arguments that would otherwise create a copy in
1300 a temporary (such as a CALL) is more than half the total argument list
1301 size, preallocation is faster.
1303 Another reason to preallocate is if we have a machine (like the m88k)
1304 where stack alignment is required to be maintained between every
1305 pair of insns, not just when the call is made. However, we assume here
1306 that such machines either do not have push insns (and hence preallocation
1307 would occur anyway) or the problem is taken care of with
1308 PUSH_ROUNDING. */
1310 if (! must_preallocate)
1312 int partial_seen = 0;
1313 int copy_to_evaluate_size = 0;
1314 int i;
1316 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1318 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1319 partial_seen = 1;
1320 else if (partial_seen && args[i].reg == 0)
1321 must_preallocate = 1;
1323 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1324 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1325 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1326 || TREE_CODE (args[i].tree_value) == COND_EXPR
1327 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1328 copy_to_evaluate_size
1329 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1332 if (copy_to_evaluate_size * 2 >= args_size->constant
1333 && args_size->constant > 0)
1334 must_preallocate = 1;
1336 return must_preallocate;
1339 /* If we preallocated stack space, compute the address of each argument
1340 and store it into the ARGS array.
1342 We need not ensure it is a valid memory address here; it will be
1343 validized when it is used.
1345 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1347 static void
1348 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1350 if (argblock)
1352 rtx arg_reg = argblock;
1353 int i, arg_offset = 0;
1355 if (GET_CODE (argblock) == PLUS)
1356 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1358 for (i = 0; i < num_actuals; i++)
1360 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1361 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1362 rtx addr;
1363 unsigned int align, boundary;
1365 /* Skip this parm if it will not be passed on the stack. */
1366 if (! args[i].pass_on_stack && args[i].reg != 0)
1367 continue;
1369 if (GET_CODE (offset) == CONST_INT)
1370 addr = plus_constant (arg_reg, INTVAL (offset));
1371 else
1372 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1374 addr = plus_constant (addr, arg_offset);
1375 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1376 set_mem_attributes (args[i].stack,
1377 TREE_TYPE (args[i].tree_value), 1);
1378 align = BITS_PER_UNIT;
1379 boundary = args[i].locate.boundary;
1380 if (args[i].locate.where_pad != downward)
1381 align = boundary;
1382 else if (GET_CODE (offset) == CONST_INT)
1384 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1385 align = align & -align;
1387 set_mem_align (args[i].stack, align);
1389 if (GET_CODE (slot_offset) == CONST_INT)
1390 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1391 else
1392 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1394 addr = plus_constant (addr, arg_offset);
1395 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1396 set_mem_attributes (args[i].stack_slot,
1397 TREE_TYPE (args[i].tree_value), 1);
1398 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1400 /* Function incoming arguments may overlap with sibling call
1401 outgoing arguments and we cannot allow reordering of reads
1402 from function arguments with stores to outgoing arguments
1403 of sibling calls. */
1404 set_mem_alias_set (args[i].stack, 0);
1405 set_mem_alias_set (args[i].stack_slot, 0);
 1410 /* Given a FNDECL and ADDR, return an rtx suitable for use as a target address
1411 in a call instruction.
1413 FNDECL is the tree node for the target function. For an indirect call
1414 FNDECL will be NULL_TREE.
1416 ADDR is the operand 0 of CALL_EXPR for this call. */
1418 static rtx
1419 rtx_for_function_call (tree fndecl, tree addr)
1421 rtx funexp;
1423 /* Get the function to call, in the form of RTL. */
1424 if (fndecl)
1426 /* If this is the first use of the function, see if we need to
1427 make an external definition for it. */
1428 if (! TREE_USED (fndecl))
1430 assemble_external (fndecl);
1431 TREE_USED (fndecl) = 1;
1434 /* Get a SYMBOL_REF rtx for the function address. */
1435 funexp = XEXP (DECL_RTL (fndecl), 0);
1437 else
1438 /* Generate an rtx (probably a pseudo-register) for the address. */
1440 push_temp_slots ();
1441 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1442 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1444 return funexp;
1447 /* Do the register loads required for any wholly-register parms or any
1448 parms which are passed both on the stack and in a register. Their
1449 expressions were already evaluated.
1451 Mark all register-parms as living through the call, putting these USE
1452 insns in the CALL_INSN_FUNCTION_USAGE field.
 1454 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1455 checking, setting *SIBCALL_FAILURE if appropriate. */
1457 static void
1458 load_register_parameters (struct arg_data *args, int num_actuals,
1459 rtx *call_fusage, int flags, int is_sibcall,
1460 int *sibcall_failure)
1462 int i, j;
1464 for (i = 0; i < num_actuals; i++)
1466 rtx reg = ((flags & ECF_SIBCALL)
1467 ? args[i].tail_call_reg : args[i].reg);
1468 if (reg)
1470 int partial = args[i].partial;
1471 int nregs;
1472 int size = 0;
1473 rtx before_arg = get_last_insn ();
 1474 /* Set to non-negative if we must move a word at a time, even if just
 1475 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1476 we just use a normal move insn. This value can be zero if the
1477 argument is a zero size structure with no fields. */
1478 nregs = -1;
1479 if (GET_CODE (reg) == PARALLEL)
1481 else if (partial)
1483 gcc_assert (partial % UNITS_PER_WORD == 0);
1484 nregs = partial / UNITS_PER_WORD;
1486 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1488 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1489 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1491 else
1492 size = GET_MODE_SIZE (args[i].mode);
1494 /* Handle calls that pass values in multiple non-contiguous
1495 locations. The Irix 6 ABI has examples of this. */
1497 if (GET_CODE (reg) == PARALLEL)
1498 emit_group_move (reg, args[i].parallel_value);
1500 /* If simple case, just do move. If normal partial, store_one_arg
1501 has already loaded the register for us. In all other cases,
1502 load the register(s) from memory. */
1504 else if (nregs == -1)
1506 emit_move_insn (reg, args[i].value);
1507 #ifdef BLOCK_REG_PADDING
1508 /* Handle case where we have a value that needs shifting
 1509 up to the msb, e.g. a QImode value and we're padding
1510 upward on a BYTES_BIG_ENDIAN machine. */
1511 if (size < UNITS_PER_WORD
1512 && (args[i].locate.where_pad
1513 == (BYTES_BIG_ENDIAN ? upward : downward)))
1515 rtx x;
1516 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1518 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1519 report the whole reg as used. Strictly speaking, the
1520 call only uses SIZE bytes at the msb end, but it doesn't
1521 seem worth generating rtl to say that. */
1522 reg = gen_rtx_REG (word_mode, REGNO (reg));
1523 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1524 build_int_cst (NULL_TREE, shift),
1525 reg, 1);
1526 if (x != reg)
1527 emit_move_insn (reg, x);
1529 #endif
1532 /* If we have pre-computed the values to put in the registers in
1533 the case of non-aligned structures, copy them in now. */
1535 else if (args[i].n_aligned_regs != 0)
1536 for (j = 0; j < args[i].n_aligned_regs; j++)
1537 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1538 args[i].aligned_regs[j]);
1540 else if (partial == 0 || args[i].pass_on_stack)
1542 rtx mem = validize_mem (args[i].value);
1544 /* Handle a BLKmode that needs shifting. */
1545 if (nregs == 1 && size < UNITS_PER_WORD
1546 #ifdef BLOCK_REG_PADDING
1547 && args[i].locate.where_pad == downward
1548 #else
1549 && BYTES_BIG_ENDIAN
1550 #endif
1553 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1554 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1555 rtx x = gen_reg_rtx (word_mode);
1556 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1557 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1558 : LSHIFT_EXPR;
1560 emit_move_insn (x, tem);
1561 x = expand_shift (dir, word_mode, x,
1562 build_int_cst (NULL_TREE, shift),
1563 ri, 1);
1564 if (x != ri)
1565 emit_move_insn (ri, x);
1567 else
1568 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1571 /* When a parameter is a block, and perhaps in other cases, it is
1572 possible that it did a load from an argument slot that was
1573 already clobbered. */
1574 if (is_sibcall
1575 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1576 *sibcall_failure = 1;
1578 /* Handle calls that pass values in multiple non-contiguous
1579 locations. The Irix 6 ABI has examples of this. */
1580 if (GET_CODE (reg) == PARALLEL)
1581 use_group_regs (call_fusage, reg);
1582 else if (nregs == -1)
1583 use_reg (call_fusage, reg);
1584 else if (nregs > 0)
1585 use_regs (call_fusage, REGNO (reg), nregs);
1590 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1591 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1592 bytes, then we would need to push some additional bytes to pad the
1593 arguments. So, we compute an adjust to the stack pointer for an
1594 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1595 bytes. Then, when the arguments are pushed the stack will be perfectly
1596 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1597 be popped after the call. Returns the adjustment. */
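/* Worked example (illustrative, not part of the original source): with
   preferred_unit_stack_boundary == 16, stack_pointer_delta == 0,
   UNADJUSTED_ARGS_SIZE == 20 and pending_stack_adjust == 32, the function
   pops an ADJUSTMENT of 20 bytes now; pushing the 20 bytes of arguments then
   leaves the stack 16-byte aligned, and ARGS_SIZE->CONSTANT becomes 32, the
   number of bytes to pop after the call.  */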
1599 static int
1600 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1601 struct args_size *args_size,
1602 unsigned int preferred_unit_stack_boundary)
1604 /* The number of bytes to pop so that the stack will be
1605 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1606 HOST_WIDE_INT adjustment;
1607 /* The alignment of the stack after the arguments are pushed, if we
 1608 just pushed the arguments without adjusting the stack here. */
1609 unsigned HOST_WIDE_INT unadjusted_alignment;
1611 unadjusted_alignment
1612 = ((stack_pointer_delta + unadjusted_args_size)
1613 % preferred_unit_stack_boundary);
1615 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1616 as possible -- leaving just enough left to cancel out the
1617 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1618 PENDING_STACK_ADJUST is non-negative, and congruent to
1619 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1621 /* Begin by trying to pop all the bytes. */
1622 unadjusted_alignment
1623 = (unadjusted_alignment
1624 - (pending_stack_adjust % preferred_unit_stack_boundary));
1625 adjustment = pending_stack_adjust;
1626 /* Push enough additional bytes that the stack will be aligned
1627 after the arguments are pushed. */
1628 if (preferred_unit_stack_boundary > 1)
1630 if (unadjusted_alignment > 0)
1631 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1632 else
1633 adjustment += unadjusted_alignment;
 1636 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1637 bytes after the call. The right number is the entire
1638 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1639 by the arguments in the first place. */
1640 args_size->constant
1641 = pending_stack_adjust - adjustment + unadjusted_args_size;
1643 return adjustment;
 1646 /* Scan expression X to see whether it dereferences any argument slots
 1647 we have already clobbered with tail call arguments (as noted in the
 1648 stored_args_map bitmap).
 1649 Return nonzero if X dereferences such an argument slot,
 1650 zero otherwise. */
1652 static int
1653 check_sibcall_argument_overlap_1 (rtx x)
1655 RTX_CODE code;
1656 int i, j;
1657 unsigned int k;
1658 const char *fmt;
1660 if (x == NULL_RTX)
1661 return 0;
1663 code = GET_CODE (x);
1665 if (code == MEM)
1667 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1668 i = 0;
1669 else if (GET_CODE (XEXP (x, 0)) == PLUS
1670 && XEXP (XEXP (x, 0), 0) ==
1671 current_function_internal_arg_pointer
1672 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1673 i = INTVAL (XEXP (XEXP (x, 0), 1));
1674 else
1675 return 0;
1677 #ifdef ARGS_GROW_DOWNWARD
1678 i = -i - GET_MODE_SIZE (GET_MODE (x));
1679 #endif
1681 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1682 if (i + k < stored_args_map->n_bits
1683 && TEST_BIT (stored_args_map, i + k))
1684 return 1;
1686 return 0;
1689 /* Scan all subexpressions. */
1690 fmt = GET_RTX_FORMAT (code);
1691 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1693 if (*fmt == 'e')
1695 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1696 return 1;
1698 else if (*fmt == 'E')
1700 for (j = 0; j < XVECLEN (x, i); j++)
1701 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1702 return 1;
1705 return 0;
 1708 /* Scan the sequence after INSN to see whether it dereferences any argument
 1709 slots we have already clobbered with tail call arguments (as noted in the
 1710 stored_args_map bitmap). If MARK_STORED_ARGS_MAP is nonzero, add the stack
 1711 slots for ARG to the stored_args_map bitmap afterwards (when ARG is a
 1712 register, MARK_STORED_ARGS_MAP should be 0). Return nonzero if the sequence
 1713 after INSN dereferences such argument slots, zero otherwise. */
1715 static int
1716 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1718 int low, high;
1720 if (insn == NULL_RTX)
1721 insn = get_insns ();
1722 else
1723 insn = NEXT_INSN (insn);
1725 for (; insn; insn = NEXT_INSN (insn))
1726 if (INSN_P (insn)
1727 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1728 break;
1730 if (mark_stored_args_map)
1732 #ifdef ARGS_GROW_DOWNWARD
1733 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1734 #else
1735 low = arg->locate.slot_offset.constant;
1736 #endif
1738 for (high = low + arg->locate.size.constant; low < high; low++)
1739 SET_BIT (stored_args_map, low);
1741 return insn != NULL_RTX;
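/* A minimal usage sketch, mirroring the calls made from expand_call below:

     rtx before_arg = get_last_insn ();
     ... emit the stores for args[i] ...
     if (pass == 0
         && check_sibcall_argument_overlap (before_arg, &args[i], 1))
       sibcall_failure = 1;

   i.e. the insns emitted after BEFORE_ARG are scanned for references to
   slots already recorded in stored_args_map, and the slots written for this
   argument are then added to the map for later arguments to check against.  */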
1744 /* Given that a function returns a value of mode MODE at the most
1745 significant end of hard register VALUE, shift VALUE left or right
1746 as specified by LEFT_P. Return true if some action was needed. */
1748 bool
1749 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1751 HOST_WIDE_INT shift;
1753 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1754 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
1755 if (shift == 0)
1756 return false;
1758 /* Use ashr rather than lshr for right shifts. This is for the benefit
1759 of the MIPS port, which requires SImode values to be sign-extended
1760 when stored in 64-bit registers. */
1761 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1762 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1763 gcc_unreachable ();
1764 return true;
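/* For example, on a hypothetical big-endian target that returns an HImode
   value in the most significant end of a 32-bit register, SHIFT is
   32 - 16 = 16; with LEFT_P false the register is arithmetically shifted
   right by 16 bits so the value ends up in the usual least significant
   position.  */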
1767 /* Remove all REG_EQUIV notes found in the insn chain. */
1769 static void
1770 purge_reg_equiv_notes (void)
1772 rtx insn;
1774 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1776 while (1)
1778 rtx note = find_reg_note (insn, REG_EQUIV, 0);
1779 if (note)
1781 /* Remove the note and keep looking at the notes for
1782 this insn. */
1783 remove_note (insn, note);
1784 continue;
1786 break;
1791 /* Generate all the code for a function call
1792 and return an rtx for its value.
1793 Store the value in TARGET (specified as an rtx) if convenient.
1794 If the value is stored in TARGET then TARGET is returned.
1795 If IGNORE is nonzero, then we ignore the value of the function call. */
1797 rtx
1798 expand_call (tree exp, rtx target, int ignore)
1800 /* Nonzero if we are currently expanding a call. */
1801 static int currently_expanding_call = 0;
1803 /* List of actual parameters. */
1804 tree actparms = TREE_OPERAND (exp, 1);
1805 /* RTX for the function to be called. */
1806 rtx funexp;
1807 /* Sequence of insns to perform a normal "call". */
1808 rtx normal_call_insns = NULL_RTX;
1809 /* Sequence of insns to perform a tail "call". */
1810 rtx tail_call_insns = NULL_RTX;
1811 /* Data type of the function. */
1812 tree funtype;
1813 tree type_arg_types;
1814 /* Declaration of the function being called,
1815 or 0 if the function is computed (not known by name). */
1816 tree fndecl = 0;
1817 /* The type of the function being called. */
1818 tree fntype;
1819 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1820 int pass;
1822 /* Register in which non-BLKmode value will be returned,
1823 or 0 if no value or if value is BLKmode. */
1824 rtx valreg;
1825 /* Address where we should return a BLKmode value;
1826 0 if value not BLKmode. */
1827 rtx structure_value_addr = 0;
1828 /* Nonzero if that address is being passed by treating it as
1829 an extra, implicit first parameter. Otherwise,
1830 it is passed by being copied directly into struct_value_rtx. */
1831 int structure_value_addr_parm = 0;
1832 /* Size of aggregate value wanted, or zero if none wanted
1833 or if we are using the non-reentrant PCC calling convention
1834 or expecting the value in registers. */
1835 HOST_WIDE_INT struct_value_size = 0;
1836 /* Nonzero if called function returns an aggregate in memory PCC style,
1837 by returning the address of where to find it. */
1838 int pcc_struct_value = 0;
1839 rtx struct_value = 0;
1841 /* Number of actual parameters in this call, including struct value addr. */
1842 int num_actuals;
1843 /* Number of named args. Args after this are anonymous ones
1844 and they must all go on the stack. */
1845 int n_named_args;
1847 /* Vector of information about each argument.
1848 Arguments are numbered in the order they will be pushed,
1849 not the order they are written. */
1850 struct arg_data *args;
1852 /* Total size in bytes of all the stack-parms scanned so far. */
1853 struct args_size args_size;
1854 struct args_size adjusted_args_size;
1855 /* Size of arguments before any adjustments (such as rounding). */
1856 int unadjusted_args_size;
1857 /* Data on reg parms scanned so far. */
1858 CUMULATIVE_ARGS args_so_far;
1859 /* Nonzero if a reg parm has been scanned. */
1860 int reg_parm_seen;
1861 /* Nonzero if this is an indirect function call. */
1863 /* Nonzero if we must avoid push-insns in the args for this call.
1864 If stack space is allocated for register parameters, but not by the
1865 caller, then it is preallocated in the fixed part of the stack frame.
1866 So the entire argument block must then be preallocated (i.e., we
1867 ignore PUSH_ROUNDING in that case). */
1869 int must_preallocate = !PUSH_ARGS;
1871 /* Size of the stack reserved for parameter registers. */
1872 int reg_parm_stack_space = 0;
1874 /* Address of space preallocated for stack parms
1875 (on machines that lack push insns), or 0 if space not preallocated. */
1876 rtx argblock = 0;
1878 /* Mask of ECF_ flags. */
1879 int flags = 0;
1880 #ifdef REG_PARM_STACK_SPACE
1881 /* Define the boundary of the register parm stack space that needs to be
1882 saved, if any. */
1883 int low_to_save, high_to_save;
1884 rtx save_area = 0; /* Place that it is saved */
1885 #endif
1887 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1888 char *initial_stack_usage_map = stack_usage_map;
1890 int old_stack_allocated;
1892 /* State variables to track stack modifications. */
1893 rtx old_stack_level = 0;
1894 int old_stack_arg_under_construction = 0;
1895 int old_pending_adj = 0;
1896 int old_inhibit_defer_pop = inhibit_defer_pop;
1898 /* Some stack pointer alterations we make are performed via
1899 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1900 which we then also need to save/restore along the way. */
1901 int old_stack_pointer_delta = 0;
1903 rtx call_fusage;
1904 tree p = TREE_OPERAND (exp, 0);
1905 tree addr = TREE_OPERAND (exp, 0);
1906 int i;
1907 /* The alignment of the stack, in bits. */
1908 unsigned HOST_WIDE_INT preferred_stack_boundary;
1909 /* The alignment of the stack, in bytes. */
1910 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1911 /* The static chain value to use for this call. */
1912 rtx static_chain_value;
1913 /* See if this is a "nothrow" function call. */
1914 if (TREE_NOTHROW (exp))
1915 flags |= ECF_NOTHROW;
1917 /* See if we can find a DECL-node for the actual function, and get the
1918 function attributes (flags) from the function decl or type node. */
1919 fndecl = get_callee_fndecl (exp);
1920 if (fndecl)
1922 fntype = TREE_TYPE (fndecl);
1923 flags |= flags_from_decl_or_type (fndecl);
1925 else
1927 fntype = TREE_TYPE (TREE_TYPE (p));
1928 flags |= flags_from_decl_or_type (fntype);
1931 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
1933 /* Warn if this value is an aggregate type,
1934 regardless of which calling convention we are using for it. */
1935 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1936 warning ("function call has aggregate value");
1938 /* If the result of a pure or const function call is ignored (or void),
1939 and none of its arguments are volatile, we can avoid expanding the
1940 call and just evaluate the arguments for side-effects. */
1941 if ((flags & (ECF_CONST | ECF_PURE))
1942 && (ignore || target == const0_rtx
1943 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
1945 bool volatilep = false;
1946 tree arg;
1948 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1949 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
1951 volatilep = true;
1952 break;
1955 if (! volatilep)
1957 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1958 expand_expr (TREE_VALUE (arg), const0_rtx,
1959 VOIDmode, EXPAND_NORMAL);
1960 return const0_rtx;
1964 #ifdef REG_PARM_STACK_SPACE
1965 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1966 #endif
1968 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1969 if (reg_parm_stack_space > 0 && PUSH_ARGS)
1970 must_preallocate = 1;
1971 #endif
1973 /* Set up a place to return a structure. */
1975 /* Cater to broken compilers. */
1976 if (aggregate_value_p (exp, fndecl))
1978 /* This call returns a big structure. */
1979 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1981 #ifdef PCC_STATIC_STRUCT_RETURN
1983 pcc_struct_value = 1;
1985 #else /* not PCC_STATIC_STRUCT_RETURN */
1987 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
1989 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
1991 /* The structure value address arg is already in actparms.
1992 Pull it out. It might be nice to just leave it there, but
1993 we need to set structure_value_addr. */
1994 tree return_arg = TREE_VALUE (actparms);
1995 actparms = TREE_CHAIN (actparms);
1996 structure_value_addr = expand_expr (return_arg, NULL_RTX,
1997 VOIDmode, EXPAND_NORMAL);
1999 #if 0
2000 else if (target && MEM_P (target))
2001 structure_value_addr = XEXP (target, 0);
2002 #endif
2003 else
2005 /* For variable-sized objects, we must be called with a target
2006 specified. If we were to allocate space on the stack here,
2007 we would have no way of knowing when to free it. */
2008 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2010 mark_temp_addr_taken (d);
2011 structure_value_addr = XEXP (d, 0);
2012 target = 0;
2015 #endif /* not PCC_STATIC_STRUCT_RETURN */
2018 /* Figure out the amount to which the stack should be aligned. */
2019 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2020 if (fndecl)
2022 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2023 if (i && i->preferred_incoming_stack_boundary)
2024 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2027 /* Operand 0 is a pointer-to-function; get the type of the function. */
2028 funtype = TREE_TYPE (addr);
2029 gcc_assert (POINTER_TYPE_P (funtype));
2030 funtype = TREE_TYPE (funtype);
2032 /* Munge the tree to split complex arguments into their imaginary
2033 and real parts. */
2034 if (targetm.calls.split_complex_arg)
2036 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2037 actparms = split_complex_values (actparms);
2039 else
2040 type_arg_types = TYPE_ARG_TYPES (funtype);
2042 if (flags & ECF_MAY_BE_ALLOCA)
2043 current_function_calls_alloca = 1;
2045 /* If struct_value_rtx is 0, it means pass the address
2046 as if it were an extra parameter. */
2047 if (structure_value_addr && struct_value == 0)
2049 /* If structure_value_addr is a REG other than
2050 virtual_outgoing_args_rtx, we can always use it. If it
2051 is not a REG, we must always copy it into a register.
2052 If it is virtual_outgoing_args_rtx, we must copy it to another
2053 register in some cases. */
2054 rtx temp = (!REG_P (structure_value_addr)
2055 || (ACCUMULATE_OUTGOING_ARGS
2056 && stack_arg_under_construction
2057 && structure_value_addr == virtual_outgoing_args_rtx)
2058 ? copy_addr_to_reg (convert_memory_address
2059 (Pmode, structure_value_addr))
2060 : structure_value_addr);
2062 actparms
2063 = tree_cons (error_mark_node,
2064 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2065 temp),
2066 actparms);
2067 structure_value_addr_parm = 1;
2070 /* Count the arguments and set NUM_ACTUALS. */
2071 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2072 num_actuals++;
2074 /* Compute number of named args.
2075 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2077 if (type_arg_types != 0)
2078 n_named_args
2079 = (list_length (type_arg_types)
2080 /* Count the struct value address, if it is passed as a parm. */
2081 + structure_value_addr_parm);
2082 else
2083 /* If we know nothing, treat all args as named. */
2084 n_named_args = num_actuals;
2086 /* Start updating where the next arg would go.
2088 On some machines (such as the PA) indirect calls have a different
2089 calling convention than normal calls. The fourth argument in
2090 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2091 or not. */
2092 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2094 /* Now possibly adjust the number of named args.
2095 Normally, don't include the last named arg if anonymous args follow.
2096 We do include the last named arg if
2097 targetm.calls.strict_argument_naming() returns nonzero.
2098 (If no anonymous args follow, the result of list_length is actually
2099 one too large. This is harmless.)
2101 If targetm.calls.pretend_outgoing_varargs_named() returns
2102 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2103 this machine will be able to place unnamed args that were passed
2104 in registers into the stack. So treat all args as named. This
2105 allows the insns emitted for a specific argument list to be
2106 independent of the function declaration.
2108 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2109 we do not have any reliable way to pass unnamed args in
2110 registers, so we must force them into memory. */
2112 if (type_arg_types != 0
2113 && targetm.calls.strict_argument_naming (&args_so_far))
2115 else if (type_arg_types != 0
2116 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2117 /* Don't include the last named arg. */
2118 --n_named_args;
2119 else
2120 /* Treat all args as named. */
2121 n_named_args = num_actuals;
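/* As a concrete illustration: for a prototyped function such as
   int f (int, double) the type list ends with a void terminator, so
   list_length returns 3 -- one too large, but harmless since only two
   actual arguments exist.  For a varargs function such as
   int printf (const char *, ...) there is no terminator and list_length
   returns 1, so only the format string is treated as named unless one of
   the target hooks above says otherwise.  */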
2123 /* Make a vector to hold all the information about each arg. */
2124 args = alloca (num_actuals * sizeof (struct arg_data));
2125 memset (args, 0, num_actuals * sizeof (struct arg_data));
2127 /* Build up entries in the ARGS array, compute the size of the
2128 arguments into ARGS_SIZE, etc. */
2129 initialize_argument_information (num_actuals, args, &args_size,
2130 n_named_args, actparms, fndecl,
2131 &args_so_far, reg_parm_stack_space,
2132 &old_stack_level, &old_pending_adj,
2133 &must_preallocate, &flags,
2134 &try_tail_call, CALL_FROM_THUNK_P (exp));
2136 if (args_size.var)
2138 /* If this function requires a variable-sized argument list, don't
2139 try to make a cse'able block for this call. We may be able to
2140 do this eventually, but it is too complicated to keep track of
2141 what insns go in the cse'able block and which don't. */
2143 flags &= ~ECF_LIBCALL_BLOCK;
2144 must_preallocate = 1;
2147 /* Now make final decision about preallocating stack space. */
2148 must_preallocate = finalize_must_preallocate (must_preallocate,
2149 num_actuals, args,
2150 &args_size);
2152 /* If the structure value address will reference the stack pointer, we
2153 must stabilize it. We don't need to do this if we know that we are
2154 not going to adjust the stack pointer in processing this call. */
2156 if (structure_value_addr
2157 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2158 || reg_mentioned_p (virtual_outgoing_args_rtx,
2159 structure_value_addr))
2160 && (args_size.var
2161 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2162 structure_value_addr = copy_to_reg (structure_value_addr);
2164 /* Tail calls can make things harder to debug, and we've traditionally
2165 pushed these optimizations into -O2. Don't try if we're already
2166 expanding a call, as that means we're an argument. Don't try if
2167 there are cleanups, as we know there's code to follow the call. */
2169 if (currently_expanding_call++ != 0
2170 || !flag_optimize_sibling_calls
2171 || args_size.var
2172 || lookup_stmt_eh_region (exp) >= 0)
2173 try_tail_call = 0;
2175 /* Other reasons for the tail call optimization to fail. */
2176 if (
2177 #ifdef HAVE_sibcall_epilogue
2178 !HAVE_sibcall_epilogue
2179 #else
2181 #endif
2182 || !try_tail_call
2183 /* Doing sibling call optimization needs some work, since
2184 structure_value_addr can be allocated on the stack.
2185 It does not seem worth the effort since few optimizable
2186 sibling calls will return a structure. */
2187 || structure_value_addr != NULL_RTX
2188 /* Check whether the target is able to optimize the call
2189 into a sibcall. */
2190 || !targetm.function_ok_for_sibcall (fndecl, exp)
2191 /* Functions that do not return exactly once may not be sibcall
2192 optimized. */
2193 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2194 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2195 /* If the called function is nested in the current one, it might access
2196 some of the caller's arguments, but could clobber them beforehand if
2197 the argument areas are shared. */
2198 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2199 /* If this function requires more stack slots than the current
2200 function, we cannot change it into a sibling call. */
2201 || args_size.constant > current_function_args_size
2202 /* If the callee pops its own arguments, then it must pop exactly
2203 the same number of arguments as the current function. */
2204 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2205 != RETURN_POPS_ARGS (current_function_decl,
2206 TREE_TYPE (current_function_decl),
2207 current_function_args_size))
2208 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2209 try_tail_call = 0;
2211 /* Ensure current function's preferred stack boundary is at least
2212 what we need. We don't have to increase alignment for recursive
2213 functions. */
2214 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2215 && fndecl != current_function_decl)
2216 cfun->preferred_stack_boundary = preferred_stack_boundary;
2217 if (fndecl == current_function_decl)
2218 cfun->recursive_call_emit = true;
2220 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2222 /* We want to make two insn chains; one for a sibling call, the other
2223 for a normal call. We will select one of the two chains after
2224 initial RTL generation is complete. */
2225 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2227 int sibcall_failure = 0;
2228 /* We want to emit any pending stack adjustments before the tail
2229 recursion "call". That way we know any adjustment after the tail
2230 recursion call can be ignored if we indeed use the tail
2231 call expansion. */
2232 int save_pending_stack_adjust = 0;
2233 int save_stack_pointer_delta = 0;
2234 rtx insns;
2235 rtx before_call, next_arg_reg;
2237 if (pass == 0)
2239 /* State variables we need to save and restore between
2240 iterations. */
2241 save_pending_stack_adjust = pending_stack_adjust;
2242 save_stack_pointer_delta = stack_pointer_delta;
2244 if (pass)
2245 flags &= ~ECF_SIBCALL;
2246 else
2247 flags |= ECF_SIBCALL;
2249 /* Other state variables that we must reinitialize each time
2250 through the loop (that are not initialized by the loop itself). */
2251 argblock = 0;
2252 call_fusage = 0;
2254 /* Start a new sequence for the normal call case.
2256 From this point on, if the sibling call fails, we want to set
2257 sibcall_failure instead of continuing the loop. */
2258 start_sequence ();
2260 /* Don't let pending stack adjusts add up to too much.
2261 Also, do all pending adjustments now if there is any chance
2262 this might be a call to alloca or if we are expanding a sibling
2263 call sequence or if we are calling a function that is to return
2264 with stack pointer depressed.
2265 Also do the adjustments before a throwing call, otherwise
2266 exception handling can fail; PR 19225. */
2267 if (pending_stack_adjust >= 32
2268 || (pending_stack_adjust > 0
2269 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2270 || (pending_stack_adjust > 0
2271 && flag_exceptions && !(flags & ECF_NOTHROW))
2272 || pass == 0)
2273 do_pending_stack_adjust ();
2275 /* When calling a const function, we must pop the stack args right away,
2276 so that the pop is deleted or moved with the call. */
2277 if (pass && (flags & ECF_LIBCALL_BLOCK))
2278 NO_DEFER_POP;
2280 /* Precompute any arguments as needed. */
2281 if (pass)
2282 precompute_arguments (flags, num_actuals, args);
2284 /* Now we are about to start emitting insns that can be deleted
2285 if a libcall is deleted. */
2286 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2287 start_sequence ();
2289 adjusted_args_size = args_size;
2290 /* Compute the actual size of the argument block required. The variable
2291 and constant sizes must be combined, the size may have to be rounded,
2292 and there may be a minimum required size. When generating a sibcall
2293 pattern, do not round up, since we'll be re-using whatever space our
2294 caller provided. */
2295 unadjusted_args_size
2296 = compute_argument_block_size (reg_parm_stack_space,
2297 &adjusted_args_size,
2298 (pass == 0 ? 0
2299 : preferred_stack_boundary));
2301 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2303 /* The argument block when performing a sibling call is the
2304 incoming argument block. */
2305 if (pass == 0)
2307 argblock = virtual_incoming_args_rtx;
2308 argblock
2309 #ifdef STACK_GROWS_DOWNWARD
2310 = plus_constant (argblock, current_function_pretend_args_size);
2311 #else
2312 = plus_constant (argblock, -current_function_pretend_args_size);
2313 #endif
2314 stored_args_map = sbitmap_alloc (args_size.constant);
2315 sbitmap_zero (stored_args_map);
2318 /* If we have no actual push instructions, or shouldn't use them,
2319 make space for all args right now. */
2320 else if (adjusted_args_size.var != 0)
2322 if (old_stack_level == 0)
2324 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2325 old_stack_pointer_delta = stack_pointer_delta;
2326 old_pending_adj = pending_stack_adjust;
2327 pending_stack_adjust = 0;
2328 /* stack_arg_under_construction says whether a stack arg is
2329 being constructed at the old stack level. Pushing the stack
2330 gets a clean outgoing argument block. */
2331 old_stack_arg_under_construction = stack_arg_under_construction;
2332 stack_arg_under_construction = 0;
2334 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2336 else
2338 /* Note that we must go through the motions of allocating an argument
2339 block even if the size is zero because we may be storing args
2340 in the area reserved for register arguments, which may be part of
2341 the stack frame. */
2343 int needed = adjusted_args_size.constant;
2345 /* Store the maximum argument space used. It will be pushed by
2346 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2347 checking). */
2349 if (needed > current_function_outgoing_args_size)
2350 current_function_outgoing_args_size = needed;
2352 if (must_preallocate)
2354 if (ACCUMULATE_OUTGOING_ARGS)
2356 /* Since the stack pointer will never be pushed, it is
2357 possible for the evaluation of a parm to clobber
2358 something we have already written to the stack.
2359 Since most function calls on RISC machines do not use
2360 the stack, this is uncommon, but must work correctly.
2362 Therefore, we save any area of the stack that was already
2363 written and that we are using. Here we set up to do this
2364 by making a new stack usage map from the old one. The
2365 actual save will be done by store_one_arg.
2367 Another approach might be to try to reorder the argument
2368 evaluations to avoid this conflicting stack usage. */
2370 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2371 /* Since we will be writing into the entire argument area,
2372 the map must be allocated for its entire size, not just
2373 the part that is the responsibility of the caller. */
2374 needed += reg_parm_stack_space;
2375 #endif
2377 #ifdef ARGS_GROW_DOWNWARD
2378 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2379 needed + 1);
2380 #else
2381 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2382 needed);
2383 #endif
2384 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2386 if (initial_highest_arg_in_use)
2387 memcpy (stack_usage_map, initial_stack_usage_map,
2388 initial_highest_arg_in_use);
2390 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2391 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2392 (highest_outgoing_arg_in_use
2393 - initial_highest_arg_in_use));
2394 needed = 0;
2396 /* The address of the outgoing argument list must not be
2397 copied to a register here, because argblock would be left
2398 pointing to the wrong place after the call to
2399 allocate_dynamic_stack_space below. */
2401 argblock = virtual_outgoing_args_rtx;
2403 else
2405 if (inhibit_defer_pop == 0)
2407 /* Try to reuse some or all of the pending_stack_adjust
2408 to get this space. */
2409 needed
2410 = (combine_pending_stack_adjustment_and_call
2411 (unadjusted_args_size,
2412 &adjusted_args_size,
2413 preferred_unit_stack_boundary));
2415 /* combine_pending_stack_adjustment_and_call computes
2416 an adjustment before the arguments are allocated.
2417 Account for them and see whether or not the stack
2418 needs to go up or down. */
2419 needed = unadjusted_args_size - needed;
2421 if (needed < 0)
2423 /* We're releasing stack space. */
2424 /* ??? We can avoid any adjustment at all if we're
2425 already aligned. FIXME. */
2426 pending_stack_adjust = -needed;
2427 do_pending_stack_adjust ();
2428 needed = 0;
2430 else
2431 /* We need to allocate space. We'll do that in
2432 push_block below. */
2433 pending_stack_adjust = 0;
2436 /* Special case this because overhead of `push_block' in
2437 this case is non-trivial. */
2438 if (needed == 0)
2439 argblock = virtual_outgoing_args_rtx;
2440 else
2442 argblock = push_block (GEN_INT (needed), 0, 0);
2443 #ifdef ARGS_GROW_DOWNWARD
2444 argblock = plus_constant (argblock, needed);
2445 #endif
2448 /* We only really need to call `copy_to_reg' in the case
2449 where push insns are going to be used to pass ARGBLOCK
2450 to a function call in ARGS. In that case, the stack
2451 pointer changes value from the allocation point to the
2452 call point, and hence the value of
2453 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2454 as well always do it. */
2455 argblock = copy_to_reg (argblock);
2460 if (ACCUMULATE_OUTGOING_ARGS)
2462 /* The save/restore code in store_one_arg handles all
2463 cases except one: a constructor call (including a C
2464 function returning a BLKmode struct) to initialize
2465 an argument. */
2466 if (stack_arg_under_construction)
2468 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2469 rtx push_size = GEN_INT (reg_parm_stack_space
2470 + adjusted_args_size.constant);
2471 #else
2472 rtx push_size = GEN_INT (adjusted_args_size.constant);
2473 #endif
2474 if (old_stack_level == 0)
2476 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2477 NULL_RTX);
2478 old_stack_pointer_delta = stack_pointer_delta;
2479 old_pending_adj = pending_stack_adjust;
2480 pending_stack_adjust = 0;
2481 /* stack_arg_under_construction says whether a stack
2482 arg is being constructed at the old stack level.
2483 Pushing the stack gets a clean outgoing argument
2484 block. */
2485 old_stack_arg_under_construction
2486 = stack_arg_under_construction;
2487 stack_arg_under_construction = 0;
2488 /* Make a new map for the new argument list. */
2489 stack_usage_map = alloca (highest_outgoing_arg_in_use);
2490 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2491 highest_outgoing_arg_in_use = 0;
2493 allocate_dynamic_stack_space (push_size, NULL_RTX,
2494 BITS_PER_UNIT);
2497 /* If argument evaluation might modify the stack pointer,
2498 copy the address of the argument list to a register. */
2499 for (i = 0; i < num_actuals; i++)
2500 if (args[i].pass_on_stack)
2502 argblock = copy_addr_to_reg (argblock);
2503 break;
2507 compute_argument_addresses (args, argblock, num_actuals);
2509 /* If we push args individually in reverse order, perform stack alignment
2510 before the first push (the last arg). */
2511 if (PUSH_ARGS_REVERSED && argblock == 0
2512 && adjusted_args_size.constant != unadjusted_args_size)
2514 /* When the stack adjustment is pending, we get better code
2515 by combining the adjustments. */
2516 if (pending_stack_adjust
2517 && ! (flags & ECF_LIBCALL_BLOCK)
2518 && ! inhibit_defer_pop)
2520 pending_stack_adjust
2521 = (combine_pending_stack_adjustment_and_call
2522 (unadjusted_args_size,
2523 &adjusted_args_size,
2524 preferred_unit_stack_boundary));
2525 do_pending_stack_adjust ();
2527 else if (argblock == 0)
2528 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2529 - unadjusted_args_size));
2531 /* Now that the stack is properly aligned, pops can't safely
2532 be deferred during the evaluation of the arguments. */
2533 NO_DEFER_POP;
2535 funexp = rtx_for_function_call (fndecl, addr);
2537 /* Figure out the register where the value, if any, will come back. */
2538 valreg = 0;
2539 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2540 && ! structure_value_addr)
2542 if (pcc_struct_value)
2543 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2544 fndecl, (pass == 0));
2545 else
2546 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2549 /* Precompute all register parameters. It isn't safe to compute anything
2550 once we have started filling any specific hard regs. */
2551 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2553 if (TREE_OPERAND (exp, 2))
2554 static_chain_value = expand_expr (TREE_OPERAND (exp, 2),
2555 NULL_RTX, VOIDmode, 0);
2556 else
2557 static_chain_value = 0;
2559 #ifdef REG_PARM_STACK_SPACE
2560 /* Save the fixed argument area if it's part of the caller's frame and
2561 is clobbered by argument setup for this call. */
2562 if (ACCUMULATE_OUTGOING_ARGS && pass)
2563 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2564 &low_to_save, &high_to_save);
2565 #endif
2567 /* Now store (and compute if necessary) all non-register parms.
2568 These come before register parms, since they can require block-moves,
2569 which could clobber the registers used for register parms.
2570 Parms which have partial registers are not stored here,
2571 but we do preallocate space here if they want that. */
2573 for (i = 0; i < num_actuals; i++)
2574 if (args[i].reg == 0 || args[i].pass_on_stack)
2576 rtx before_arg = get_last_insn ();
2578 if (store_one_arg (&args[i], argblock, flags,
2579 adjusted_args_size.var != 0,
2580 reg_parm_stack_space)
2581 || (pass == 0
2582 && check_sibcall_argument_overlap (before_arg,
2583 &args[i], 1)))
2584 sibcall_failure = 1;
2586 if (flags & ECF_CONST
2587 && args[i].stack
2588 && args[i].value == args[i].stack)
2589 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2590 gen_rtx_USE (VOIDmode,
2591 args[i].value),
2592 call_fusage);
2595 /* If we have a parm that is passed in registers but not in memory
2596 and whose alignment does not permit a direct copy into registers,
2597 make a group of pseudos that correspond to each register that we
2598 will later fill. */
2599 if (STRICT_ALIGNMENT)
2600 store_unaligned_arguments_into_pseudos (args, num_actuals);
2602 /* Now store any partially-in-registers parm.
2603 This is the last place a block-move can happen. */
2604 if (reg_parm_seen)
2605 for (i = 0; i < num_actuals; i++)
2606 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2608 rtx before_arg = get_last_insn ();
2610 if (store_one_arg (&args[i], argblock, flags,
2611 adjusted_args_size.var != 0,
2612 reg_parm_stack_space)
2613 || (pass == 0
2614 && check_sibcall_argument_overlap (before_arg,
2615 &args[i], 1)))
2616 sibcall_failure = 1;
2619 /* If we pushed args in forward order, perform stack alignment
2620 after pushing the last arg. */
2621 if (!PUSH_ARGS_REVERSED && argblock == 0)
2622 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2623 - unadjusted_args_size));
2625 /* If register arguments require space on the stack and stack space
2626 was not preallocated, allocate stack space here for arguments
2627 passed in registers. */
2628 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2629 if (!ACCUMULATE_OUTGOING_ARGS
2630 && must_preallocate == 0 && reg_parm_stack_space > 0)
2631 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2632 #endif
2634 /* Pass the function the address in which to return a
2635 structure value. */
2636 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2638 structure_value_addr
2639 = convert_memory_address (Pmode, structure_value_addr);
2640 emit_move_insn (struct_value,
2641 force_reg (Pmode,
2642 force_operand (structure_value_addr,
2643 NULL_RTX)));
2645 if (REG_P (struct_value))
2646 use_reg (&call_fusage, struct_value);
2649 funexp = prepare_call_address (funexp, static_chain_value,
2650 &call_fusage, reg_parm_seen, pass == 0);
2652 load_register_parameters (args, num_actuals, &call_fusage, flags,
2653 pass == 0, &sibcall_failure);
2655 /* Save a pointer to the last insn before the call, so that we can
2656 later safely search backwards to find the CALL_INSN. */
2657 before_call = get_last_insn ();
2659 /* Set up next argument register. For sibling calls on machines
2660 with register windows this should be the incoming register. */
2661 #ifdef FUNCTION_INCOMING_ARG
2662 if (pass == 0)
2663 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2664 void_type_node, 1);
2665 else
2666 #endif
2667 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2668 void_type_node, 1);
2670 /* All arguments and registers used for the call must be set up by
2671 now! */
2673 /* Stack must be properly aligned now. */
2674 gcc_assert (!pass
2675 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2677 /* Generate the actual call instruction. */
2678 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2679 adjusted_args_size.constant, struct_value_size,
2680 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2681 flags, & args_so_far);
2683 /* If a non-BLKmode value is returned at the most significant end
2684 of a register, shift the register right by the appropriate amount
2685 and update VALREG accordingly. BLKmode values are handled by the
2686 group load/store machinery below. */
2687 if (!structure_value_addr
2688 && !pcc_struct_value
2689 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2690 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2692 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2693 sibcall_failure = 1;
2694 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2697 /* If call is cse'able, make appropriate pair of reg-notes around it.
2698 Test valreg so we don't crash; may safely ignore `const'
2699 if return type is void. Disable for PARALLEL return values, because
2700 we have no way to move such values into a pseudo register. */
2701 if (pass && (flags & ECF_LIBCALL_BLOCK))
2703 rtx insns;
2704 rtx insn;
2705 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2707 insns = get_insns ();
2709 /* Expansion of block moves may have introduced a loop, which is not
2710 allowed to appear inside a libcall block. */
2711 for (insn = insns; insn; insn = NEXT_INSN (insn))
2712 if (JUMP_P (insn))
2713 failed = true;
2715 if (failed)
2717 end_sequence ();
2718 emit_insn (insns);
2720 else
2722 rtx note = 0;
2723 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2725 /* Mark the return value as a pointer if needed. */
2726 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2727 mark_reg_pointer (temp,
2728 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2730 end_sequence ();
2731 if (flag_unsafe_math_optimizations
2732 && fndecl
2733 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2734 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2735 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2736 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2737 note = gen_rtx_fmt_e (SQRT,
2738 GET_MODE (temp),
2739 args[0].initial_value);
2740 else
2742 /* Construct an "equal form" for the value which
2743 mentions all the arguments in order as well as
2744 the function name. */
2745 for (i = 0; i < num_actuals; i++)
2746 note = gen_rtx_EXPR_LIST (VOIDmode,
2747 args[i].initial_value, note);
2748 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2750 if (flags & ECF_PURE)
2751 note = gen_rtx_EXPR_LIST (VOIDmode,
2752 gen_rtx_USE (VOIDmode,
2753 gen_rtx_MEM (BLKmode,
2754 gen_rtx_SCRATCH (VOIDmode))),
2755 note);
2757 emit_libcall_block (insns, temp, valreg, note);
2759 valreg = temp;
2762 else if (pass && (flags & ECF_MALLOC))
2764 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2765 rtx last, insns;
2767 /* The return value from a malloc-like function is a pointer. */
2768 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2769 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2771 emit_move_insn (temp, valreg);
2773 /* The return value from a malloc-like function cannot alias
2774 anything else. */
2775 last = get_last_insn ();
2776 REG_NOTES (last) =
2777 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2779 /* Write out the sequence. */
2780 insns = get_insns ();
2781 end_sequence ();
2782 emit_insn (insns);
2783 valreg = temp;
2786 /* For calls to `setjmp', etc., inform flow.c it should complain
2787 if nonvolatile values are live. For functions that cannot return,
2788 inform flow that control does not fall through. */
2790 if ((flags & ECF_NORETURN) || pass == 0)
2792 /* The barrier must be emitted
2793 immediately after the CALL_INSN. Some ports emit more
2794 than just a CALL_INSN above, so we must search for it here. */
2796 rtx last = get_last_insn ();
2797 while (!CALL_P (last))
2799 last = PREV_INSN (last);
2800 /* There was no CALL_INSN? */
2801 gcc_assert (last != before_call);
2804 emit_barrier_after (last);
2806 /* Stack adjustments after a noreturn call are dead code.
2807 However when NO_DEFER_POP is in effect, we must preserve
2808 stack_pointer_delta. */
2809 if (inhibit_defer_pop == 0)
2811 stack_pointer_delta = old_stack_allocated;
2812 pending_stack_adjust = 0;
2816 /* If value type not void, return an rtx for the value. */
2818 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2819 || ignore)
2820 target = const0_rtx;
2821 else if (structure_value_addr)
2823 if (target == 0 || !MEM_P (target))
2825 target
2826 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2827 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2828 structure_value_addr));
2829 set_mem_attributes (target, exp, 1);
2832 else if (pcc_struct_value)
2834 /* This is the special C++ case where we need to
2835 know what the true target was. We take care to
2836 never use this value more than once in one expression. */
2837 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2838 copy_to_reg (valreg));
2839 set_mem_attributes (target, exp, 1);
2841 /* Handle calls that return values in multiple non-contiguous locations.
2842 The Irix 6 ABI has examples of this. */
2843 else if (GET_CODE (valreg) == PARALLEL)
2845 if (target == 0)
2847 /* This will only be assigned once, so it can be readonly. */
2848 tree nt = build_qualified_type (TREE_TYPE (exp),
2849 (TYPE_QUALS (TREE_TYPE (exp))
2850 | TYPE_QUAL_CONST));
2852 target = assign_temp (nt, 0, 1, 1);
2855 if (! rtx_equal_p (target, valreg))
2856 emit_group_store (target, valreg, TREE_TYPE (exp),
2857 int_size_in_bytes (TREE_TYPE (exp)));
2859 /* We cannot support sibling calls for this case. */
2860 sibcall_failure = 1;
2862 else if (target
2863 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2864 && GET_MODE (target) == GET_MODE (valreg))
2866 /* TARGET and VALREG cannot be equal at this point because the
2867 latter would not have REG_FUNCTION_VALUE_P true, while the
2868 former would if it were referring to the same register.
2870 If they refer to the same register, this move will be a no-op,
2871 except when function inlining is being done. */
2872 emit_move_insn (target, valreg);
2874 /* If we are setting a MEM, this code must be executed. Since it is
2875 emitted after the call insn, sibcall optimization cannot be
2876 performed in that case. */
2877 if (MEM_P (target))
2878 sibcall_failure = 1;
2880 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2882 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2884 /* We cannot support sibling calls for this case. */
2885 sibcall_failure = 1;
2887 else
2888 target = copy_to_reg (valreg);
2890 if (targetm.calls.promote_function_return(funtype))
2892 /* If we promoted this return value, make the proper SUBREG.
2893 TARGET might be const0_rtx here, so be careful. */
2894 if (REG_P (target)
2895 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2896 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2898 tree type = TREE_TYPE (exp);
2899 int unsignedp = TYPE_UNSIGNED (type);
2900 int offset = 0;
2901 enum machine_mode pmode;
2903 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
2904 /* If we don't promote as expected, something is wrong. */
2905 gcc_assert (GET_MODE (target) == pmode);
2907 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
2908 && (GET_MODE_SIZE (GET_MODE (target))
2909 > GET_MODE_SIZE (TYPE_MODE (type))))
2911 offset = GET_MODE_SIZE (GET_MODE (target))
2912 - GET_MODE_SIZE (TYPE_MODE (type));
2913 if (! BYTES_BIG_ENDIAN)
2914 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
2915 else if (! WORDS_BIG_ENDIAN)
2916 offset %= UNITS_PER_WORD;
2918 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
2919 SUBREG_PROMOTED_VAR_P (target) = 1;
2920 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
2924 /* If size of args is variable or this was a constructor call for a stack
2925 argument, restore saved stack-pointer value. */
2927 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
2929 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2930 stack_pointer_delta = old_stack_pointer_delta;
2931 pending_stack_adjust = old_pending_adj;
2932 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2933 stack_arg_under_construction = old_stack_arg_under_construction;
2934 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2935 stack_usage_map = initial_stack_usage_map;
2936 sibcall_failure = 1;
2938 else if (ACCUMULATE_OUTGOING_ARGS && pass)
2940 #ifdef REG_PARM_STACK_SPACE
2941 if (save_area)
2942 restore_fixed_argument_area (save_area, argblock,
2943 high_to_save, low_to_save);
2944 #endif
2946 /* If we saved any argument areas, restore them. */
2947 for (i = 0; i < num_actuals; i++)
2948 if (args[i].save_area)
2950 enum machine_mode save_mode = GET_MODE (args[i].save_area);
2951 rtx stack_area
2952 = gen_rtx_MEM (save_mode,
2953 memory_address (save_mode,
2954 XEXP (args[i].stack_slot, 0)));
2956 if (save_mode != BLKmode)
2957 emit_move_insn (stack_area, args[i].save_area);
2958 else
2959 emit_block_move (stack_area, args[i].save_area,
2960 GEN_INT (args[i].locate.size.constant),
2961 BLOCK_OP_CALL_PARM);
2964 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
2965 stack_usage_map = initial_stack_usage_map;
2968 /* If this was alloca, record the new stack level for nonlocal gotos.
2969 Check for the handler slots since we might not have a save area
2970 for non-local gotos. */
2972 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
2973 update_nonlocal_goto_save_area ();
2975 /* Free up storage we no longer need. */
2976 for (i = 0; i < num_actuals; ++i)
2977 if (args[i].aligned_regs)
2978 free (args[i].aligned_regs);
2980 insns = get_insns ();
2981 end_sequence ();
2983 if (pass == 0)
2985 tail_call_insns = insns;
2987 /* Restore the pending stack adjustment now that we have
2988 finished generating the sibling call sequence. */
2990 pending_stack_adjust = save_pending_stack_adjust;
2991 stack_pointer_delta = save_stack_pointer_delta;
2993 /* Prepare arg structure for next iteration. */
2994 for (i = 0; i < num_actuals; i++)
2996 args[i].value = 0;
2997 args[i].aligned_regs = 0;
2998 args[i].stack = 0;
3001 sbitmap_free (stored_args_map);
3003 else
3005 normal_call_insns = insns;
3007 /* Verify that we've deallocated all the stack we used. */
3008 gcc_assert ((flags & ECF_NORETURN)
3009 || (old_stack_allocated
3010 == stack_pointer_delta - pending_stack_adjust));
3013 /* If something prevents making this a sibling call,
3014 zero out the sequence. */
3015 if (sibcall_failure)
3016 tail_call_insns = NULL_RTX;
3017 else
3018 break;
3021 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3022 arguments too, as argument area is now clobbered by the call. */
3023 if (tail_call_insns)
3025 emit_insn (tail_call_insns);
3026 cfun->tail_call_emit = true;
3028 else
3029 emit_insn (normal_call_insns);
3031 currently_expanding_call--;
3033 /* If this function returns with the stack pointer depressed, ensure
3034 this block saves and restores the stack pointer, show it was
3035 changed, and adjust for any outgoing arg space. */
3036 if (flags & ECF_SP_DEPRESSED)
3038 clear_pending_stack_adjust ();
3039 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3040 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3043 return target;
3046 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3047 this function's incoming arguments.
3049 At the start of RTL generation we know the only REG_EQUIV notes
3050 in the rtl chain are those for incoming arguments, so we can safely
3051 flush any REG_EQUIV note.
3053 This is (slight) overkill. We could keep track of the highest
3054 argument we clobber and be more selective in removing notes, but it
3055 does not seem to be worth the effort. */
3056 void
3057 fixup_tail_calls (void)
3059 purge_reg_equiv_notes ();
3062 /* Traverse an argument list in VALUES and expand all complex
3063 arguments into their components. */
3064 static tree
3065 split_complex_values (tree values)
3067 tree p;
3069 /* Before allocating memory, check for the common case of no complex. */
3070 for (p = values; p; p = TREE_CHAIN (p))
3072 tree type = TREE_TYPE (TREE_VALUE (p));
3073 if (type && TREE_CODE (type) == COMPLEX_TYPE
3074 && targetm.calls.split_complex_arg (type))
3075 goto found;
3077 return values;
3079 found:
3080 values = copy_list (values);
3082 for (p = values; p; p = TREE_CHAIN (p))
3084 tree complex_value = TREE_VALUE (p);
3085 tree complex_type;
3087 complex_type = TREE_TYPE (complex_value);
3088 if (!complex_type)
3089 continue;
3091 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3092 && targetm.calls.split_complex_arg (complex_type))
3094 tree subtype;
3095 tree real, imag, next;
3097 subtype = TREE_TYPE (complex_type);
3098 complex_value = save_expr (complex_value);
3099 real = build1 (REALPART_EXPR, subtype, complex_value);
3100 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3102 TREE_VALUE (p) = real;
3103 next = TREE_CHAIN (p);
3104 imag = build_tree_list (NULL_TREE, imag);
3105 TREE_CHAIN (p) = imag;
3106 TREE_CHAIN (imag) = next;
3108 /* Skip the newly created node. */
3109 p = TREE_CHAIN (p);
3113 return values;
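/* For example: given an argument list (x) where x has type _Complex double
   and the target's split_complex_arg hook accepts that type, the (copied)
   list is rewritten as (REALPART_EXPR <x>, IMAGPART_EXPR <x>), i.e. two
   arguments of type double.  split_complex_types below performs the
   matching rewrite on the list of declared parameter types.  */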
3116 /* Traverse a list of TYPES and expand all complex types into their
3117 components. */
3118 static tree
3119 split_complex_types (tree types)
3121 tree p;
3123 /* Before allocating memory, check for the common case of no complex. */
3124 for (p = types; p; p = TREE_CHAIN (p))
3126 tree type = TREE_VALUE (p);
3127 if (TREE_CODE (type) == COMPLEX_TYPE
3128 && targetm.calls.split_complex_arg (type))
3129 goto found;
3131 return types;
3133 found:
3134 types = copy_list (types);
3136 for (p = types; p; p = TREE_CHAIN (p))
3138 tree complex_type = TREE_VALUE (p);
3140 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3141 && targetm.calls.split_complex_arg (complex_type))
3143 tree next, imag;
3145 /* Rewrite complex type with component type. */
3146 TREE_VALUE (p) = TREE_TYPE (complex_type);
3147 next = TREE_CHAIN (p);
3149 /* Add another component type for the imaginary part. */
3150 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3151 TREE_CHAIN (p) = imag;
3152 TREE_CHAIN (imag) = next;
3154 /* Skip the newly created node. */
3155 p = TREE_CHAIN (p);
3159 return types;
3162 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3163 The RETVAL parameter specifies whether the return value needs to be saved;
3164 the other parameters are documented in the emit_library_call function below. */
3166 static rtx
3167 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3168 enum libcall_type fn_type,
3169 enum machine_mode outmode, int nargs, va_list p)
3171 /* Total size in bytes of all the stack-parms scanned so far. */
3172 struct args_size args_size;
3173 /* Size of arguments before any adjustments (such as rounding). */
3174 struct args_size original_args_size;
3175 int argnum;
3176 rtx fun;
3177 int inc;
3178 int count;
3179 rtx argblock = 0;
3180 CUMULATIVE_ARGS args_so_far;
3181 struct arg
3183 rtx value;
3184 enum machine_mode mode;
3185 rtx reg;
3186 int partial;
3187 struct locate_and_pad_arg_data locate;
3188 rtx save_area;
3190 struct arg *argvec;
3191 int old_inhibit_defer_pop = inhibit_defer_pop;
3192 rtx call_fusage = 0;
3193 rtx mem_value = 0;
3194 rtx valreg;
3195 int pcc_struct_value = 0;
3196 int struct_value_size = 0;
3197 int flags;
3198 int reg_parm_stack_space = 0;
3199 int needed;
3200 rtx before_call;
3201 tree tfom; /* type_for_mode (outmode, 0) */
3203 #ifdef REG_PARM_STACK_SPACE
3204 /* Define the boundary of the register parm stack space that needs to be
3205 saved, if any. */
3206 int low_to_save, high_to_save;
3207 rtx save_area = 0; /* Place that it is saved. */
3208 #endif
3210 /* Size of the stack reserved for parameter registers. */
3211 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3212 char *initial_stack_usage_map = stack_usage_map;
3214 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3216 #ifdef REG_PARM_STACK_SPACE
3217 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3218 #endif
3220 /* By default, library functions cannot throw. */
3221 flags = ECF_NOTHROW;
3223 switch (fn_type)
3225 case LCT_NORMAL:
3226 break;
3227 case LCT_CONST:
3228 flags |= ECF_CONST;
3229 break;
3230 case LCT_PURE:
3231 flags |= ECF_PURE;
3232 break;
3233 case LCT_CONST_MAKE_BLOCK:
3234 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3235 break;
3236 case LCT_PURE_MAKE_BLOCK:
3237 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3238 break;
3239 case LCT_NORETURN:
3240 flags |= ECF_NORETURN;
3241 break;
3242 case LCT_THROW:
3243 flags = ECF_NORETURN;
3244 break;
3245 case LCT_ALWAYS_RETURN:
3246 flags = ECF_ALWAYS_RETURN;
3247 break;
3248 case LCT_RETURNS_TWICE:
3249 flags = ECF_RETURNS_TWICE;
3250 break;
3252 fun = orgfun;
3254 /* Ensure current function's preferred stack boundary is at least
3255 what we need. */
3256 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3257 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3259 /* If this kind of value comes back in memory,
3260 decide where in memory it should come back. */
3261 if (outmode != VOIDmode)
3263 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3264 if (aggregate_value_p (tfom, 0))
3266 #ifdef PCC_STATIC_STRUCT_RETURN
3267 rtx pointer_reg
3268 = hard_function_value (build_pointer_type (tfom), 0, 0);
3269 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3270 pcc_struct_value = 1;
3271 if (value == 0)
3272 value = gen_reg_rtx (outmode);
3273 #else /* not PCC_STATIC_STRUCT_RETURN */
3274 struct_value_size = GET_MODE_SIZE (outmode);
3275 if (value != 0 && MEM_P (value))
3276 mem_value = value;
3277 else
3278 mem_value = assign_temp (tfom, 0, 1, 1);
3279 #endif
3280 /* This call returns a big structure. */
3281 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3284 else
3285 tfom = void_type_node;
3287 /* ??? Unfinished: must pass the memory address as an argument. */
3289 /* Copy all the libcall-arguments out of the varargs data
3290 and into a vector ARGVEC.
3292 Compute how to pass each argument. We only support a very small subset
3293 of the full argument passing conventions to limit complexity here since
3294 library functions shouldn't have many args. */
3296 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3297 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3299 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3300 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3301 #else
3302 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3303 #endif
3305 args_size.constant = 0;
3306 args_size.var = 0;
3308 count = 0;
3310 /* Now we are about to start emitting insns that can be deleted
3311 if a libcall is deleted. */
3312 if (flags & ECF_LIBCALL_BLOCK)
3313 start_sequence ();
3315 push_temp_slots ();
3317 /* If there's a structure value address to be passed,
3318 either pass it in the special place, or pass it as an extra argument. */
3319 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3321 rtx addr = XEXP (mem_value, 0);
3323 nargs++;
3325 /* Make sure it is a reasonable operand for a move or push insn. */
3326 if (!REG_P (addr) && !MEM_P (addr)
3327 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3328 addr = force_operand (addr, NULL_RTX);
3330 argvec[count].value = addr;
3331 argvec[count].mode = Pmode;
3332 argvec[count].partial = 0;
3334 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3335 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3336 NULL_TREE, 1) == 0);
3338 locate_and_pad_parm (Pmode, NULL_TREE,
3339 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3341 #else
3342 argvec[count].reg != 0,
3343 #endif
3344 0, NULL_TREE, &args_size, &argvec[count].locate);
3346 if (argvec[count].reg == 0 || argvec[count].partial != 0
3347 || reg_parm_stack_space > 0)
3348 args_size.constant += argvec[count].locate.size.constant;
3350 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3352 count++;
3355 for (; count < nargs; count++)
3357 rtx val = va_arg (p, rtx);
3358 enum machine_mode mode = va_arg (p, enum machine_mode);
3360 /* We cannot convert the arg value to the mode the library wants here;
3361 must do it earlier where we know the signedness of the arg. */
3362 gcc_assert (mode != BLKmode
3363 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3365 /* Make sure it is a reasonable operand for a move or push insn. */
3366 if (!REG_P (val) && !MEM_P (val)
3367 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3368 val = force_operand (val, NULL_RTX);
3370 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3372 rtx slot;
3373 int must_copy
3374 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3376 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3377 functions, so we have to pretend this isn't such a function. */
3378 if (flags & ECF_LIBCALL_BLOCK)
3380 rtx insns = get_insns ();
3381 end_sequence ();
3382 emit_insn (insns);
3384 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3386 /* If this was a CONST function, it is now PURE since
3387 it now reads memory. */
3388 if (flags & ECF_CONST)
3390 flags &= ~ECF_CONST;
3391 flags |= ECF_PURE;
3394 if (MEM_P (val) && !must_copy)
3395 slot = val;
3396 else
3398 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3399 0, 1, 1);
3400 emit_move_insn (slot, val);
3403 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3404 gen_rtx_USE (VOIDmode, slot),
3405 call_fusage);
3406 if (must_copy)
3407 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3408 gen_rtx_CLOBBER (VOIDmode,
3409 slot),
3410 call_fusage);
3412 mode = Pmode;
3413 val = force_operand (XEXP (slot, 0), NULL_RTX);
3416 argvec[count].value = val;
3417 argvec[count].mode = mode;
3419 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3421 argvec[count].partial
3422 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3424 locate_and_pad_parm (mode, NULL_TREE,
3425 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3427 #else
3428 argvec[count].reg != 0,
3429 #endif
3430 argvec[count].partial,
3431 NULL_TREE, &args_size, &argvec[count].locate);
3433 gcc_assert (!argvec[count].locate.size.var);
3435 if (argvec[count].reg == 0 || argvec[count].partial != 0
3436 || reg_parm_stack_space > 0)
3437 args_size.constant += argvec[count].locate.size.constant;
3439 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3442 /* If this machine requires an external definition for library
3443 functions, write one out. */
3444 assemble_external_libcall (fun);
3446 original_args_size = args_size;
3447 args_size.constant = (((args_size.constant
3448 + stack_pointer_delta
3449 + STACK_BYTES - 1)
3450 / STACK_BYTES
3451 * STACK_BYTES)
3452 - stack_pointer_delta);
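/* Illustrative arithmetic (hypothetical values): with STACK_BYTES == 16,
   stack_pointer_delta == 4 and an unrounded args_size.constant of 20, the
   expression rounds 20 + 4 = 24 up to 32 and subtracts the delta again,
   giving 28 -- just enough so that the stack pointer lands back on a
   16-byte boundary once the arguments are pushed.  */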
3454 args_size.constant = MAX (args_size.constant,
3455 reg_parm_stack_space);
3457 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3458 args_size.constant -= reg_parm_stack_space;
3459 #endif
3461 if (args_size.constant > current_function_outgoing_args_size)
3462 current_function_outgoing_args_size = args_size.constant;
3464 if (ACCUMULATE_OUTGOING_ARGS)
3466 /* Since the stack pointer will never be pushed, it is possible for
3467 the evaluation of a parm to clobber something we have already
3468 written to the stack. Since most function calls on RISC machines
3469 do not use the stack, this is uncommon, but must work correctly.
3471 Therefore, we save any area of the stack that was already written
3472 and that we are using. Here we set up to do this by making a new
3473 stack usage map from the old one.
3475 Another approach might be to try to reorder the argument
3476 evaluations to avoid this conflicting stack usage. */
3478 needed = args_size.constant;
3480 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3481 /* Since we will be writing into the entire argument area, the
3482 map must be allocated for its entire size, not just the part that
3483 is the responsibility of the caller. */
3484 needed += reg_parm_stack_space;
3485 #endif
3487 #ifdef ARGS_GROW_DOWNWARD
3488 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3489 needed + 1);
3490 #else
3491 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3492 needed);
3493 #endif
3494 stack_usage_map = alloca (highest_outgoing_arg_in_use);
3496 if (initial_highest_arg_in_use)
3497 memcpy (stack_usage_map, initial_stack_usage_map,
3498 initial_highest_arg_in_use);
3500 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3501 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3502 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3503 needed = 0;
3505 /* We must be careful to use virtual regs before they're instantiated,
3506 and real regs afterwards. Loop optimization, for example, can create
3507 new libcalls after we've instantiated the virtual regs, and if we
3508 use virtuals anyway, they won't match the rtl patterns. */
3510 if (virtuals_instantiated)
3511 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3512 else
3513 argblock = virtual_outgoing_args_rtx;
3515 else
3517 if (!PUSH_ARGS)
3518 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3521 /* If we push args individually in reverse order, perform stack alignment
3522 before the first push (the last arg). */
3523 if (argblock == 0 && PUSH_ARGS_REVERSED)
3524 anti_adjust_stack (GEN_INT (args_size.constant
3525 - original_args_size.constant));
3527 if (PUSH_ARGS_REVERSED)
3529 inc = -1;
3530 argnum = nargs - 1;
3532 else
3534 inc = 1;
3535 argnum = 0;
3538 #ifdef REG_PARM_STACK_SPACE
3539 if (ACCUMULATE_OUTGOING_ARGS)
3541 /* The argument list is the property of the called routine and it
3542 may clobber it. If the fixed area has been used for previous
3543 parameters, we must save and restore it. */
3544 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3545 &low_to_save, &high_to_save);
3547 #endif
3549 /* Push the args that need to be pushed. */
3551 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3552 are to be pushed. */
3553 for (count = 0; count < nargs; count++, argnum += inc)
3555 enum machine_mode mode = argvec[argnum].mode;
3556 rtx val = argvec[argnum].value;
3557 rtx reg = argvec[argnum].reg;
3558 int partial = argvec[argnum].partial;
3559 int lower_bound = 0, upper_bound = 0, i;
3561 if (! (reg != 0 && partial == 0))
3563 if (ACCUMULATE_OUTGOING_ARGS)
3565 /* If this is being stored into a pre-allocated, fixed-size,
3566 stack area, save any previous data at that location. */
3568 #ifdef ARGS_GROW_DOWNWARD
3569 /* stack_slot is negative, but we want to index stack_usage_map
3570 with positive values. */
3571 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3572 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3573 #else
3574 lower_bound = argvec[argnum].locate.offset.constant;
3575 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3576 #endif
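/* Example (illustrative): with ARGS_GROW_DOWNWARD, an offset of -8 and
   a size of 4 give upper_bound == 9 and lower_bound == 5, i.e. map
   entries 5..8 -- the same bytes the downward-growing slot occupies.  */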
3578 i = lower_bound;
3579 /* Don't worry about things in the fixed argument area;
3580 it has already been saved. */
3581 if (i < reg_parm_stack_space)
3582 i = reg_parm_stack_space;
3583 while (i < upper_bound && stack_usage_map[i] == 0)
3584 i++;
3586 if (i < upper_bound)
3588 /* We need to make a save area. */
3589 unsigned int size
3590 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3591 enum machine_mode save_mode
3592 = mode_for_size (size, MODE_INT, 1);
3593 rtx adr
3594 = plus_constant (argblock,
3595 argvec[argnum].locate.offset.constant);
3596 rtx stack_area
3597 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3599 if (save_mode == BLKmode)
3601 argvec[argnum].save_area
3602 = assign_stack_temp (BLKmode,
3603 argvec[argnum].locate.size.constant,
3604 0);
3606 emit_block_move (validize_mem (argvec[argnum].save_area),
3607 stack_area,
3608 GEN_INT (argvec[argnum].locate.size.constant),
3609 BLOCK_OP_CALL_PARM);
3611 else
3613 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3615 emit_move_insn (argvec[argnum].save_area, stack_area);
3620 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3621 partial, reg, 0, argblock,
3622 GEN_INT (argvec[argnum].locate.offset.constant),
3623 reg_parm_stack_space,
3624 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3626 /* Now mark the segment we just used. */
3627 if (ACCUMULATE_OUTGOING_ARGS)
3628 for (i = lower_bound; i < upper_bound; i++)
3629 stack_usage_map[i] = 1;
3631 NO_DEFER_POP;
3635 /* If we pushed args in forward order, perform stack alignment
3636 after pushing the last arg. */
3637 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3638 anti_adjust_stack (GEN_INT (args_size.constant
3639 - original_args_size.constant));
3641 if (PUSH_ARGS_REVERSED)
3642 argnum = nargs - 1;
3643 else
3644 argnum = 0;
3646 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3648 /* Now load any reg parms into their regs. */
3650 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3651 are to be pushed. */
3652 for (count = 0; count < nargs; count++, argnum += inc)
3654 enum machine_mode mode = argvec[argnum].mode;
3655 rtx val = argvec[argnum].value;
3656 rtx reg = argvec[argnum].reg;
3657 int partial = argvec[argnum].partial;
3659 /* Handle calls that pass values in multiple non-contiguous
3660 locations. The PA64 has examples of this for library calls. */
3661 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3662 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3663 else if (reg != 0 && partial == 0)
3664 emit_move_insn (reg, val);
3666 NO_DEFER_POP;
3669 /* Any regs containing parms remain in use through the call. */
3670 for (count = 0; count < nargs; count++)
3672 rtx reg = argvec[count].reg;
3673 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3674 use_group_regs (&call_fusage, reg);
3675 else if (reg != 0)
3676 use_reg (&call_fusage, reg);
3679 /* Pass the function the address in which to return a structure value. */
3680 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3682 emit_move_insn (struct_value,
3683 force_reg (Pmode,
3684 force_operand (XEXP (mem_value, 0),
3685 NULL_RTX)));
3686 if (REG_P (struct_value))
3687 use_reg (&call_fusage, struct_value);
3690 /* Don't allow popping to be deferred, since then
3691 cse'ing of library calls could delete a call and leave the pop. */
3692 NO_DEFER_POP;
3693 valreg = (mem_value == 0 && outmode != VOIDmode
3694 ? hard_libcall_value (outmode) : NULL_RTX);
3696 /* Stack must be properly aligned now. */
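/* (Illustrative: with a 128-bit PREFERRED_STACK_BOUNDARY the mask below
   is 15, so any pending adjustment that is not a multiple of 16 bytes
   trips the assert before the call is emitted.)  */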
3697 gcc_assert (!(stack_pointer_delta
3698 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3700 before_call = get_last_insn ();
3702 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3703 will set inhibit_defer_pop to that value. */
3704 /* The return type is needed to decide how many bytes the function pops.
3705 Signedness plays no role in that, so for simplicity, we pretend it's
3706 always signed. We also assume that the list of arguments passed has
3707 no impact, so we pretend it is unknown. */
3709 emit_call_1 (fun, NULL,
3710 get_identifier (XSTR (orgfun, 0)),
3711 build_function_type (tfom, NULL_TREE),
3712 original_args_size.constant, args_size.constant,
3713 struct_value_size,
3714 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3715 valreg,
3716 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3718 /* For calls to `setjmp', etc., inform flow.c it should complain
3719 if nonvolatile values are live. For functions that cannot return,
3720 inform flow that control does not fall through. */
3722 if (flags & ECF_NORETURN)
3724 /* The barrier note must be emitted
3725 immediately after the CALL_INSN. Some ports emit more than
3726 just a CALL_INSN above, so we must search for it here. */
3728 rtx last = get_last_insn ();
3729 while (!CALL_P (last))
3731 last = PREV_INSN (last);
3732 /* There was no CALL_INSN? */
3733 gcc_assert (last != before_call);
3736 emit_barrier_after (last);
3739 /* Now restore inhibit_defer_pop to its actual original value. */
3740 OK_DEFER_POP;
3742 /* If call is cse'able, make appropriate pair of reg-notes around it.
3743 Test valreg so we don't crash; may safely ignore `const'
3744 if return type is void. Disable for PARALLEL return values, because
3745 we have no way to move such values into a pseudo register. */
3746 if (flags & ECF_LIBCALL_BLOCK)
3748 rtx insns;
3750 if (valreg == 0)
3752 insns = get_insns ();
3753 end_sequence ();
3754 emit_insn (insns);
3756 else
3758 rtx note = 0;
3759 rtx temp;
3760 int i;
3762 if (GET_CODE (valreg) == PARALLEL)
3764 temp = gen_reg_rtx (outmode);
3765 emit_group_store (temp, valreg, NULL_TREE,
3766 GET_MODE_SIZE (outmode));
3767 valreg = temp;
3770 temp = gen_reg_rtx (GET_MODE (valreg));
3772 /* Construct an "equal form" for the value which mentions all the
3773 arguments in order as well as the function name. */
3774 for (i = 0; i < nargs; i++)
3775 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3776 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3778 insns = get_insns ();
3779 end_sequence ();
3781 if (flags & ECF_PURE)
3782 note = gen_rtx_EXPR_LIST (VOIDmode,
3783 gen_rtx_USE (VOIDmode,
3784 gen_rtx_MEM (BLKmode,
3785 gen_rtx_SCRATCH (VOIDmode))),
3786 note);
3788 emit_libcall_block (insns, temp, valreg, note);
3790 valreg = temp;
3793 pop_temp_slots ();
3795 /* Copy the value to the right place. */
3796 if (outmode != VOIDmode && retval)
3798 if (mem_value)
3800 if (value == 0)
3801 value = mem_value;
3802 if (value != mem_value)
3803 emit_move_insn (value, mem_value);
3805 else if (GET_CODE (valreg) == PARALLEL)
3807 if (value == 0)
3808 value = gen_reg_rtx (outmode);
3809 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3811 else if (value != 0)
3812 emit_move_insn (value, valreg);
3813 else
3814 value = valreg;
3817 if (ACCUMULATE_OUTGOING_ARGS)
3819 #ifdef REG_PARM_STACK_SPACE
3820 if (save_area)
3821 restore_fixed_argument_area (save_area, argblock,
3822 high_to_save, low_to_save);
3823 #endif
3825 /* If we saved any argument areas, restore them. */
3826 for (count = 0; count < nargs; count++)
3827 if (argvec[count].save_area)
3829 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3830 rtx adr = plus_constant (argblock,
3831 argvec[count].locate.offset.constant);
3832 rtx stack_area = gen_rtx_MEM (save_mode,
3833 memory_address (save_mode, adr));
3835 if (save_mode == BLKmode)
3836 emit_block_move (stack_area,
3837 validize_mem (argvec[count].save_area),
3838 GEN_INT (argvec[count].locate.size.constant),
3839 BLOCK_OP_CALL_PARM);
3840 else
3841 emit_move_insn (stack_area, argvec[count].save_area);
3844 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3845 stack_usage_map = initial_stack_usage_map;
3848 return value;
3852 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3854 for a value of mode OUTMODE,
3855 with NARGS different arguments, passed as alternating rtx values
3856 and machine_modes to convert them to.
3858 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
3859 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
3860 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
3861 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
3862 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
3863 or other LCT_ value for other types of library calls. */
3865 void
3866 emit_library_call (rtx orgfun, enum libcall_type fn_type,
3867 enum machine_mode outmode, int nargs, ...)
3869 va_list p;
3871 va_start (p, nargs);
3872 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
3873 va_end (p);
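/* Usage sketch (illustrative only, not code from this file): a caller
   registering an SJLJ unwind context might emit, roughly,

     emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL,
                        VOIDmode, 1, addr, Pmode);

   where ADDR is an rtx set up beforehand -- one value/mode pair per
   argument, matching the alternating convention described above.  */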
3876 /* Like emit_library_call except that an extra argument, VALUE,
3877 comes second and says where to store the result.
3878 (If VALUE is zero, this function chooses a convenient way
3879 to return the value.)
3881 This function returns an rtx for where the value is to be found.
3882 If VALUE is nonzero, VALUE is returned. */
3884 rtx
3885 emit_library_call_value (rtx orgfun, rtx value,
3886 enum libcall_type fn_type,
3887 enum machine_mode outmode, int nargs, ...)
3889 rtx result;
3890 va_list p;
3892 va_start (p, nargs);
3893 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
3894 nargs, p);
3895 va_end (p);
3897 return result;
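/* Usage sketch (illustrative only): the libcall fallback in optabs.c
   expands a two-operand operation roughly as

     rtx result = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                           mode, 2, op0, mode, op1, mode);

   where LIBFUNC is a SYMBOL_REF (e.g. from init_one_libfunc) and OP0/OP1
   are already in MODE.  */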
3900 /* Store a single argument for a function call
3901 into the register or memory area where it must be passed.
3902 *ARG describes the argument value and where to pass it.
3904 ARGBLOCK is the address of the stack-block for all the arguments,
3905 or 0 on a machine where arguments are pushed individually.
3907 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
3908 so we must be careful about how the stack is used.
3910 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
3911 argument stack. This is used when ACCUMULATE_OUTGOING_ARGS is set to indicate
3912 that we need not worry about saving and restoring the stack.
3914 FNDECL is the declaration of the function we are calling.
3916 Return nonzero if this arg should cause sibcall failure,
3917 zero otherwise. */
3919 static int
3920 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
3921 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
3923 tree pval = arg->tree_value;
3924 rtx reg = 0;
3925 int partial = 0;
3926 int used = 0;
3927 int i, lower_bound = 0, upper_bound = 0;
3928 int sibcall_failure = 0;
3930 if (TREE_CODE (pval) == ERROR_MARK)
3931 return 1;
3933 /* Push a new temporary level for any temporaries we make for
3934 this argument. */
3935 push_temp_slots ();
3937 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
3939 /* If this is being stored into a pre-allocated, fixed-size, stack area,
3940 save any previous data at that location. */
3941 if (argblock && ! variable_size && arg->stack)
3943 #ifdef ARGS_GROW_DOWNWARD
3944 /* stack_slot is negative, but we want to index stack_usage_map
3945 with positive values. */
3946 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3947 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
3948 else
3949 upper_bound = 0;
3951 lower_bound = upper_bound - arg->locate.size.constant;
3952 #else
3953 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
3954 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
3955 else
3956 lower_bound = 0;
3958 upper_bound = lower_bound + arg->locate.size.constant;
3959 #endif
3961 i = lower_bound;
3962 /* Don't worry about things in the fixed argument area;
3963 it has already been saved. */
3964 if (i < reg_parm_stack_space)
3965 i = reg_parm_stack_space;
3966 while (i < upper_bound && stack_usage_map[i] == 0)
3967 i++;
3969 if (i < upper_bound)
3971 /* We need to make a save area. */
3972 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
3973 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
3974 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
3975 rtx stack_area = gen_rtx_MEM (save_mode, adr);
3977 if (save_mode == BLKmode)
3979 tree ot = TREE_TYPE (arg->tree_value);
3980 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
3981 | TYPE_QUAL_CONST));
3983 arg->save_area = assign_temp (nt, 0, 1, 1);
3984 preserve_temp_slots (arg->save_area);
3985 emit_block_move (validize_mem (arg->save_area), stack_area,
3986 expr_size (arg->tree_value),
3987 BLOCK_OP_CALL_PARM);
3989 else
3991 arg->save_area = gen_reg_rtx (save_mode);
3992 emit_move_insn (arg->save_area, stack_area);
3998 /* If this isn't going to be placed on both the stack and in registers,
3999 set up the register and number of words. */
4000 if (! arg->pass_on_stack)
4002 if (flags & ECF_SIBCALL)
4003 reg = arg->tail_call_reg;
4004 else
4005 reg = arg->reg;
4006 partial = arg->partial;
4009 /* Being passed entirely in a register. We shouldn't be called in
4010 this case. */
4011 gcc_assert (reg == 0 || partial != 0);
4013 /* If this arg needs special alignment, don't load the registers
4014 here. */
4015 if (arg->n_aligned_regs != 0)
4016 reg = 0;
4018 /* If this is being passed partially in a register, we can't evaluate
4019 it directly into its stack slot. Otherwise, we can. */
4020 if (arg->value == 0)
4022 /* stack_arg_under_construction is nonzero if a function argument is
4023 being evaluated directly into the outgoing argument list and
4024 expand_call must take special action to preserve the argument list
4025 if it is called recursively.
4027 For scalar function arguments stack_usage_map is sufficient to
4028 determine which stack slots must be saved and restored. Scalar
4029 arguments in general have pass_on_stack == 0.
4031 If this argument is initialized by a function which takes the
4032 address of the argument (a C++ constructor or a C function
4033 returning a BLKmode structure), then stack_usage_map is
4034 insufficient and expand_call must push the stack around the
4035 function call. Such arguments have pass_on_stack == 1.
4037 Note that it is always safe to set stack_arg_under_construction,
4038 but this generates suboptimal code if set when not needed. */
4040 if (arg->pass_on_stack)
4041 stack_arg_under_construction++;
4043 arg->value = expand_expr (pval,
4044 (partial
4045 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4046 ? NULL_RTX : arg->stack,
4047 VOIDmode, EXPAND_STACK_PARM);
4049 /* If we are promoting the object (or, for any other reason, the mode
4050 doesn't agree), convert the mode. */
4052 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4053 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4054 arg->value, arg->unsignedp);
4056 if (arg->pass_on_stack)
4057 stack_arg_under_construction--;
4060 /* Check for overlap with already clobbered argument area. */
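/* STORED_ARGS_MAP has one bit per byte of the argument area, set when an
   earlier store for this call has already overwritten that byte.  If the
   value still has to be read from an overlapping part of the incoming
   argument area, it has been clobbered, so a sibling call is unsafe.  */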
4061 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4063 int i = -1;
4064 unsigned HOST_WIDE_INT k;
4065 rtx x = arg->value;
4067 if (XEXP (x, 0) == current_function_internal_arg_pointer)
4068 i = 0;
4069 else if (GET_CODE (XEXP (x, 0)) == PLUS
4070 && XEXP (XEXP (x, 0), 0) ==
4071 current_function_internal_arg_pointer
4072 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
4073 i = INTVAL (XEXP (XEXP (x, 0), 1));
4074 else
4075 i = -1;
4077 if (i >= 0)
4079 #ifdef ARGS_GROW_DOWNWARD
4080 i = -i - arg->locate.size.constant;
4081 #endif
4082 if (arg->locate.size.constant > 0)
4084 unsigned HOST_WIDE_INT sc = arg->locate.size.constant;
4086 for (k = 0; k < sc; k++)
4087 if (i + k < stored_args_map->n_bits
4088 && TEST_BIT (stored_args_map, i + k))
4090 sibcall_failure = 1;
4091 break;
4097 /* Don't allow anything left on stack from computation
4098 of argument to alloca. */
4099 if (flags & ECF_MAY_BE_ALLOCA)
4100 do_pending_stack_adjust ();
4102 if (arg->value == arg->stack)
4103 /* If the value is already in the stack slot, we are done. */
4104 ;
4105 else if (arg->mode != BLKmode)
4107 int size;
4109 /* Argument is a scalar, not entirely passed in registers.
4110 (If part is passed in registers, arg->partial says how much
4111 and emit_push_insn will take care of putting it there.)
4113 Push it, and if its size is less than the
4114 amount of space allocated to it,
4115 also bump stack pointer by the additional space.
4116 Note that in C the default argument promotions
4117 will prevent such mismatches. */
4119 size = GET_MODE_SIZE (arg->mode);
4120 /* Compute how much space the push instruction will push.
4121 On many machines, pushing a byte will advance the stack
4122 pointer by a halfword. */
4123 #ifdef PUSH_ROUNDING
4124 size = PUSH_ROUNDING (size);
4125 #endif
4126 used = size;
4128 /* Compute how much space the argument should get:
4129 round up to a multiple of the alignment for arguments. */
4130 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4131 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4132 / (PARM_BOUNDARY / BITS_PER_UNIT))
4133 * (PARM_BOUNDARY / BITS_PER_UNIT));
4135 /* This isn't already where we want it on the stack, so put it there.
4136 This can either be done with push or copy insns. */
4137 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4138 PARM_BOUNDARY, partial, reg, used - size, argblock,
4139 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4140 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4142 /* Unless this is a partially-in-register argument, the argument is now
4143 in the stack. */
4144 if (partial == 0)
4145 arg->value = arg->stack;
4147 else
4149 /* BLKmode, at least partly to be pushed. */
4151 unsigned int parm_align;
4152 int excess;
4153 rtx size_rtx;
4155 /* Pushing a nonscalar.
4156 If part is passed in registers, PARTIAL says how much
4157 and emit_push_insn will take care of putting it there. */
4159 /* Round its size up to a multiple
4160 of the allocation unit for arguments. */
4162 if (arg->locate.size.var != 0)
4164 excess = 0;
4165 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4167 else
4169 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4170 for BLKmode is careful to avoid it. */
4171 excess = (arg->locate.size.constant
4172 - int_size_in_bytes (TREE_TYPE (pval))
4173 + partial);
4174 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4175 NULL_RTX, TYPE_MODE (sizetype), 0);
4178 parm_align = arg->locate.boundary;
4180 /* When an argument is padded down, the block is aligned to
4181 PARM_BOUNDARY, but the actual argument isn't. */
4182 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4184 if (arg->locate.size.var)
4185 parm_align = BITS_PER_UNIT;
4186 else if (excess)
4188 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4189 parm_align = MIN (parm_align, excess_align);
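/* (EXCESS & -EXCESS isolates the lowest set bit of EXCESS, i.e. the
   largest power of two that divides it, so the alignment we claim never
   exceeds what the padded-down placement actually guarantees.)  */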
4193 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4195 /* emit_push_insn might not work properly if arg->value and
4196 argblock + arg->locate.offset areas overlap. */
4197 rtx x = arg->value;
4198 int i = 0;
4200 if (XEXP (x, 0) == current_function_internal_arg_pointer
4201 || (GET_CODE (XEXP (x, 0)) == PLUS
4202 && XEXP (XEXP (x, 0), 0) ==
4203 current_function_internal_arg_pointer
4204 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4206 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4207 i = INTVAL (XEXP (XEXP (x, 0), 1));
4209 /* expand_call should ensure this. */
4210 gcc_assert (!arg->locate.offset.var
4211 && GET_CODE (size_rtx) == CONST_INT);
4213 if (arg->locate.offset.constant > i)
4215 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4216 sibcall_failure = 1;
4218 else if (arg->locate.offset.constant < i)
4220 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4221 sibcall_failure = 1;
4226 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4227 parm_align, partial, reg, excess, argblock,
4228 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4229 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4231 /* Unless this is a partially-in-register argument, the argument is now
4232 in the stack.
4234 ??? Unlike the case above, in which we want the actual
4235 address of the data, so that we can load it directly into a
4236 register, here we want the address of the stack slot, so that
4237 it's properly aligned for word-by-word copying or something
4238 like that. It's not clear that this is always correct. */
4239 if (partial == 0)
4240 arg->value = arg->stack_slot;
4243 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4245 tree type = TREE_TYPE (arg->tree_value);
4246 arg->parallel_value
4247 = emit_group_load_into_temps (arg->reg, arg->value, type,
4248 int_size_in_bytes (type));
4251 /* Mark all slots this store used. */
4252 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4253 && argblock && ! variable_size && arg->stack)
4254 for (i = lower_bound; i < upper_bound; i++)
4255 stack_usage_map[i] = 1;
4257 /* Once we have pushed something, pops can't safely
4258 be deferred during the rest of the arguments. */
4259 NO_DEFER_POP;
4261 /* Free any temporary slots made in processing this argument. Show
4262 that we might have taken the address of something and pushed that
4263 as an operand. */
4264 preserve_temp_slots (NULL_RTX);
4265 free_temp_slots ();
4266 pop_temp_slots ();
4268 return sibcall_failure;
4271 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4273 bool
4274 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4275 tree type)
4277 if (!type)
4278 return false;
4280 /* If the type has variable size... */
4281 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4282 return true;
4284 /* If the type is marked as addressable (it is required
4285 to be constructed into the stack)... */
4286 if (TREE_ADDRESSABLE (type))
4287 return true;
4289 return false;
4292 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4293 takes trailing padding of a structure into account. */
4294 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4296 bool
4297 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
4299 if (!type)
4300 return false;
4302 /* If the type has variable size... */
4303 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4304 return true;
4306 /* If the type is marked as addressable (it is required
4307 to be constructed into the stack)... */
4308 if (TREE_ADDRESSABLE (type))
4309 return true;
4311 /* If the padding and mode of the type is such that a copy into
4312 a register would put it into the wrong part of the register. */
4313 if (mode == BLKmode
4314 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4315 && (FUNCTION_ARG_PADDING (mode, type)
4316 == (BYTES_BIG_ENDIAN ? upward : downward)))
4317 return true;
4319 return false;
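/* Either of these functions can serve as a target's
   TARGET_MUST_PASS_IN_STACK hook; a backend selects one with, e.g.
   (illustrative):

     #undef TARGET_MUST_PASS_IN_STACK
     #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size_or_pad  */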