/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
   2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "flags.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "diagnostic-core.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"
#include "dbgcnt.h"
#include "tree-flow.h"
/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
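
/* Illustrative sketch (not part of the original file): the rounding idiom
   this macro supports.  A raw byte count is rounded up to the next multiple
   of STACK_BYTES; e.g. with PREFERRED_STACK_BOUNDARY == 128, STACK_BYTES
   is 16 and a 13-byte block rounds up to 16:

     unsigned int rounded
       = ((raw_size + STACK_BYTES - 1) / STACK_BYTES) * STACK_BYTES;

   `raw_size' and `rounded' are hypothetical names used only here.  */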
/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
     form for emit_group_move.  */
  rtx parallel_value;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of bytes to put in registers.  0 means put the whole arg
     in registers.  Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Some fields packaged up for locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
};
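
/* Illustrative sketch (hypothetical, not in the original file): expand_call
   allocates one arg_data per actual argument before the routines below fill
   the entries in, roughly:

     struct arg_data *args = XALLOCAVEC (struct arg_data, num_actuals);
     memset (args, 0, num_actuals * sizeof (struct arg_data));

   `num_actuals' is assumed to hold the number of actual arguments.  */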
/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots after they have already been overwritten with tail
   call arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
static int stack_arg_under_construction;
static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
			 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
			 cumulative_args_t);
static void precompute_register_parameters (int, struct arg_data *, int *);
static int store_one_arg (struct arg_data *, rtx, int, int, int);
static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
static int finalize_must_preallocate (int, int, struct arg_data *,
				      struct args_size *);
static void precompute_arguments (int, struct arg_data *);
static int compute_argument_block_size (int, struct args_size *, tree, tree, int);
static void initialize_argument_information (int, struct arg_data *,
					     struct args_size *, int,
					     tree, tree,
					     tree, tree, cumulative_args_t, int,
					     rtx *, int *, int *, int *,
					     bool *, bool);
static void compute_argument_addresses (struct arg_data *, rtx, int);
static rtx rtx_for_function_call (tree, tree);
static void load_register_parameters (struct arg_data *, int, rtx *, int,
				      int, int *);
static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
				      enum machine_mode, int, va_list);
static int special_function_p (const_tree, int);
static int check_sibcall_argument_overlap_1 (rtx);
static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);

static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
						      unsigned int);
static tree split_complex_types (tree);

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area (int, rtx, int *, int *);
static void restore_fixed_argument_area (rtx, rtx, int, int);
#endif
/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (tree fndecl, rtx funexp, rtx static_chain_value,
		      rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
  /* Make a valid memory address and copy constants through pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((reg_parm_seen
	       && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
	funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      rtx chain;

      gcc_assert (fndecl);
      chain = targetm.calls.static_chain (fndecl, false);
      static_chain_value = convert_memory_address (Pmode, static_chain_value);

      emit_move_insn (chain, static_chain_value);
      if (REG_P (chain))
	use_reg (call_fusage, chain);
    }

  return funexp;
}
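
/* Illustrative sketch (hypothetical, not in the original file): a typical
   call site inside expand_call, after the argument registers have been
   loaded and CALL_FUSAGE collected:

     funexp = prepare_call_address (fndecl, funexp, static_chain_value,
				    &call_fusage, reg_parm_seen,
				    pass == 0);

   where a first `pass == 0' selects the sibling-call attempt.  */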
/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
   its own args.

   FUNTYPE is the data type of the function.  This is given to the hook
   TARGET_RETURN_POPS_ARGS to determine whether this function pops its
   own args.  We used to allow an identifier for library functions, but
   that doesn't work when the return type is an aggregate type and the
   calling convention says that the pointer to this aggregate is to be
   popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
	     tree funtype ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
	     HOST_WIDE_INT rounded_stack_size,
	     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn, call, funmem;
  int already_popped = 0;
  HOST_WIDE_INT n_popped
    = targetm.calls.return_pops_args (fndecl, funtype, stack_size);

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
#endif

  /* Ensure address is valid.  A SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);
  funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
  if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
    {
      tree t = fndecl;

      /* Although a built-in FUNCTION_DECL and its non-__builtin
	 counterpart compare equal and get a shared mem_attrs, they
	 produce different dump output in compare-debug compilations,
	 if an entry gets garbage collected in one compilation, then
	 adds a different (but equivalent) entry, while the other
	 doesn't run the garbage collector at the same spot and then
	 shares the mem_attr with the equivalent entry.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	{
	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  if (t2)
	    t = t2;
	}

      set_mem_expr (funmem, t);
    }
  else if (fntree)
    set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				     next_arg_reg, n_pop);
      else
	pat = GEN_SIBCALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			       n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0)
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg, funmem, rounded_stack_size_rtx,
				  next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (funmem, rounded_stack_size_rtx, next_arg_reg,
			    n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg, funmem,
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (funmem, rounded_stack_size_rtx,
				     next_arg_reg,
				     GEN_INT (struct_value_size)));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg, funmem, rounded_stack_size_rtx,
					next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_CALL (funmem, rounded_stack_size_rtx, next_arg_reg,
				  GEN_INT (struct_value_size)));
    }
  else
#endif
    gcc_unreachable ();
  /* Find the call we just emitted.  */
  call_insn = last_call_insn ();

  /* Some targets create a fresh MEM instead of reusing the one provided
     above.  Set its MEM_EXPR.  */
  call = PATTERN (call_insn);
  if (GET_CODE (call) == PARALLEL)
    call = XVECEXP (call, 0, 0);
  if (GET_CODE (call) == SET)
    call = SET_SRC (call);
  if (GET_CODE (call) == CALL
      && MEM_P (XEXP (call, 0))
      && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
      && MEM_EXPR (funmem) != NULL_TREE)
    set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));

  /* Put the register usage information there.  */
  add_function_usage_to (call_insn, call_fusage);

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_CONST)
    RTL_CONST_CALL_P (call_insn) = 1;

  /* If this is a pure call, then set the insn's unchanging bit.  */
  if (ecf_flags & ECF_PURE)
    RTL_PURE_CALL_P (call_insn) = 1;

  /* If this is a looping const or pure call, then set the insn's
     corresponding bit.  */
  if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
    RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* Create a nothrow REG_EH_REGION note, if needed.  */
  make_reg_eh_region_note (call_insn, ecf_flags, 0);

  if (ecf_flags & ECF_NORETURN)
    add_reg_note (call_insn, REG_NORETURN, const0_rtx);

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      add_reg_note (call_insn, REG_SETJMP, const0_rtx);
      cfun->calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;

      add_reg_note (call_insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

      /* If popping is needed, stack realignment must use DRAP.  */
      if (SUPPORTS_STACK_ALIGNMENT)
	crtl->need_drap = true;
    }

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_NORETURN)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similarly to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}
/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly set NORETURN if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack such as alloca.  */

static int
special_function_p (const_tree fndecl, int flags)
{
  if (fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.
	 FIXME: this should be handled with attributes, not with this
	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
	 because you can declare fork() inside a function if you
	 wish.  */
      && (DECL_CONTEXT (fndecl) == NULL_TREE
	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
      && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __, __x or __builtin_.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_'
	      && name[2] == 'b'
	      && !strncmp (name + 3, "uiltin_", 7))
	    tname += 10;
	  else if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_NORETURN;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork"))
	       || (tname[0] == 'g' && tname[1] == 'e'
		   && !strcmp (tname, "getcontext")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_NORETURN;
    }

  return flags;
}
/* Return nonzero when FNDECL represents a call to setjmp.  */

int
setjmp_call_p (const_tree fndecl)
{
  if (DECL_IS_RETURNS_TWICE (fndecl))
    return ECF_RETURNS_TWICE;
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}


/* Return true if STMT is an alloca call.  */

bool
gimple_alloca_call_p (const_gimple stmt)
{
  tree fndecl;

  if (!is_gimple_call (stmt))
    return false;

  fndecl = gimple_call_fndecl (stmt);
  if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
    return true;

  return false;
}
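
/* Illustrative sketch (hypothetical, not in the original file): scanning a
   function body for alloca calls with the predicate above:

     gimple_stmt_iterator gsi;
     basic_block bb;
     FOR_EACH_BB (bb)
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	 if (gimple_alloca_call_p (gsi_stmt (gsi)))
	   return true;
*/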
/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (const_tree exp)
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0)) == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (CALL_EXPR_FN (exp), 0), 0)
	  & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}
/* Return TRUE if FNDECL is either a TM builtin or a TM cloned
   function.  Return FALSE otherwise.  */

static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  if (decl_is_tm_clone (fndecl))
    return true;

  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}
/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & ECF_CONST) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }

  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
/* Detect flags from a CALL_EXPR.  */

int
call_expr_flags (const_tree t)
{
  int flags;
  tree decl = get_callee_fndecl (t);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else
    {
      t = TREE_TYPE (CALL_EXPR_FN (t));
      if (t && TREE_CODE (t) == POINTER_TYPE)
	flags = flags_from_decl_or_type (TREE_TYPE (t));
      else
	flags = 0;
    }

  return flags;
}
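
/* Illustrative sketch (hypothetical, not in the original file): a caller
   asking whether a call expression may be treated as free of observable
   side effects:

     int flags = call_expr_flags (call);
     bool no_side_effects = (flags & (ECF_CONST | ECF_PURE)) != 0;
*/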
/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameter registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && set_src_cost (args[i].value, optimize_insn_for_speed_p ())
		    > COSTS_N_INSNS (1)
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	enum machine_mode save_mode;
	int delta;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
	delta = -high;
#else
	delta = low;
#endif
	stack_area = gen_rtx_MEM (save_mode,
				  memory_address (save_mode,
						  plus_constant (argblock,
								 delta)));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
			    memory_address (save_mode,
					    plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */
/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit a
   direct copy into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== downward)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, false, NULL_RTX,
				      word_mode, word_mode);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here, but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register, later passes know that the first AND, which
	       zeroes out the bitfield being set in the register, is
	       unnecessary.  The store of 0 will be deleted, as will at
	       least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word);
	  }
      }
}
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree node for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */
static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
				 struct arg_data *args,
				 struct args_size *args_size,
				 int n_named_args ATTRIBUTE_UNUSED,
				 tree exp, tree struct_value_addr_value,
				 tree fndecl, tree fntype,
				 cumulative_args_t args_so_far,
				 int reg_parm_stack_space,
				 rtx *old_stack_level, int *old_pending_adj,
				 int *must_preallocate, int *ecf_flags,
				 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  int i;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;
      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* First fill in the actual arguments in the ARGS array, splitting
     complex arguments if necessary.  */
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;

    if (struct_value_addr_value)
      {
	args[j].tree_value = struct_value_addr_value;
	j += inc;
      }
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
	tree argtype = TREE_TYPE (arg);
	if (targetm.calls.split_complex_arg
	    && argtype
	    && TREE_CODE (argtype) == COMPLEX_TYPE
	    && targetm.calls.split_complex_arg (argtype))
	  {
	    tree subtype = TREE_TYPE (argtype);
	    args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
	    j += inc;
	    args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
	  }
	else
	  args[j].tree_value = arg;
	j += inc;
      }
  }
  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i += inc, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;
      enum machine_mode mode;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
	 we would pass the first field of the union or record.  We have
	 already verified that the modes are the same.  */
      if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
	  && TYPE_TRANSPARENT_AGGR (type))
	type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many bytes are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
			     type, argpos < n_named_args))
	{
	  bool callee_copies;
	  tree base = NULL_TREE;

	  callee_copies
	    = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
				       type, argpos < n_named_args);

	  /* If we're compiling a thunk, pass through invisible references
	     instead of making a copy.  */
	  if (call_from_thunk_p
	      || (callee_copies
		  && !TREE_ADDRESSABLE (type)
		  && (base = get_base_address (args[i].tree_value))
		  && TREE_CODE (base) != SSA_NAME
		  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
	    {
	      mark_addressable (args[i].tree_value);

	      /* We can't use sibcalls if a callee-copied argument is
		 stored in the current function's frame.  */
	      if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
		*may_tailcall = false;

	      args[i].tree_value = build_fold_addr_expr_loc (loc,
							     args[i].tree_value);
	      type = TREE_TYPE (args[i].tree_value);

	      if (*ecf_flags & ECF_CONST)
		*ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
		  || (flag_stack_check == GENERIC_STACK_CHECK
		      && compare_tree_int (TYPE_SIZE_UNIT (type),
					   STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (args[i].tree_value);

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  /* We can pass TRUE as the 4th argument because we just
		     saved the stack pointer and will restore it right after
		     the call.  */
		  copy = allocate_dynamic_stack_space (size_rtx,
						       TYPE_ALIGN (type),
						       TYPE_ALIGN (type),
						       true);
		  copy = gen_rtx_MEM (BLKmode, copy);
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0, false);

	      /* Just change the const function to pure and then let
		 the next test clear the pure based on
		 callee_copies.  */
	      if (*ecf_flags & ECF_CONST)
		{
		  *ecf_flags &= ~ECF_CONST;
		  *ecf_flags |= ECF_PURE;
		}

	      if (!callee_copies && *ecf_flags & ECF_PURE)
		*ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

	      args[i].tree_value
		= build_fold_addr_expr_loc (loc, make_tree (type, copy));
	      type = TREE_TYPE (args[i].tree_value);
	      *may_tailcall = false;
	    }
	}
      unsignedp = TYPE_UNSIGNED (type);
      mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
				    fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
						argpos < n_named_args);

      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
	args[i].tail_call_reg
	  = targetm.calls.function_incoming_arg (args_so_far, mode, type,
						 argpos < n_named_args);
      else
	args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
	args[i].partial
	  = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
					     argpos < n_named_args);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     args[i].pass_on_stack ? 0 : args[i].partial,
			     fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	args[i].locate.where_pad =
	  BLOCK_REG_PADDING (mode, type,
			     int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
	ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
					  type, argpos < n_named_args);
    }
}
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (int reg_parm_stack_space,
			     struct args_size *args_size,
			     tree fndecl ATTRIBUTE_UNUSED,
			     tree fntype ATTRIBUTE_UNUSED,
			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will already be aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	    args_size->var
	      = size_binop (MINUS_EXPR, args_size->var,
			    ssize_int (reg_parm_stack_space));
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
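
/* Worked example (illustrative, not in the original file): with
   args_size->constant == 13, stack_pointer_delta == 4 and a preferred
   boundary of 16 bytes, the rounding above yields
   ((13 + 4 + 15) / 16) * 16 - 4 == 32 - 4 == 28, so the argument block
   plus the already-adjusted stack pointer stays 16-byte aligned.  */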
/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (We have code to avoid
     such a case by saving the outgoing stack arguments, but it results in
     worse code.)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      enum machine_mode mode;

      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
	continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
	= expand_normal (args[i].tree_value);

      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
	{
	  int unsignedp = args[i].unsignedp;
	  args[i].value
	    = convert_modes (args[i].mode, mode,
			     args[i].value, args[i].unsignedp);

	  /* CSE will replace this only if it contains args[i].value
	     pseudo, so convert it down to the declared mode using
	     a SUBREG.  */
	  if (REG_P (args[i].value)
	      && GET_MODE_CLASS (args[i].mode) == MODE_INT
	      && promote_mode (type, mode, &unsignedp) != args[i].mode)
	    {
	      args[i].initial_value
		= gen_lowpart_SUBREG (mode, args[i].value);
	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					    args[i].unsignedp);
	    }
	}
    }
}
/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (int must_preallocate, int num_actuals,
			   struct arg_data *args, struct args_size *args_size)
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}
/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	  rtx addr;
	  unsigned int align, boundary;
	  unsigned int units_on_stack = 0;
	  enum machine_mode partial_mode = VOIDmode;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack
	      && args[i].reg != 0
	      && args[i].partial == 0)
	    continue;

	  if (CONST_INT_P (offset))
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      units_on_stack = args[i].locate.size.constant;
	      partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
					    MODE_INT, 1);
	      args[i].stack = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack, units_on_stack);
	    }
	  else
	    {
	      args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  align = BITS_PER_UNIT;
	  boundary = args[i].locate.boundary;
	  if (args[i].locate.where_pad != downward)
	    align = boundary;
	  else if (CONST_INT_P (offset))
	    {
	      align = INTVAL (offset) * BITS_PER_UNIT | boundary;
	      align = align & -align;
	    }
	  set_mem_align (args[i].stack, align);

	  if (CONST_INT_P (slot_offset))
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack_slot, units_on_stack);
	    }
	  else
	    {
	      args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack_slot,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  set_mem_align (args[i].stack_slot, args[i].locate.boundary);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (tree fndecl, tree addr)
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (!TREE_USED (fndecl) && fndecl != current_function_decl)
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_normal (addr);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
    }
  return funexp;
}
1661 /* Internal state for internal_arg_pointer_based_exp and its helpers. */
1662 static struct
1664 /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
1665 or NULL_RTX if none has been scanned yet. */
1666 rtx scan_start;
1667 /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
1668 based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
1669 pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
1670 with fixed offset, or PC if this is with variable or unknown offset. */
1671 VEC(rtx, heap) *cache;
1672 } internal_arg_pointer_exp_state;
1674 static rtx internal_arg_pointer_based_exp (rtx, bool);
1676 /* Helper function for internal_arg_pointer_based_exp. Scan insns in
1677 the tail call sequence, starting with first insn that hasn't been
1678 scanned yet, and note for each pseudo on the LHS whether it is based
1679 on crtl->args.internal_arg_pointer or not, and what offset from that
1680 that pointer it has. */
1682 static void
1683 internal_arg_pointer_based_exp_scan (void)
1685 rtx insn, scan_start = internal_arg_pointer_exp_state.scan_start;
1687 if (scan_start == NULL_RTX)
1688 insn = get_insns ();
1689 else
1690 insn = NEXT_INSN (scan_start);
1692 while (insn)
1694 rtx set = single_set (insn);
1695 if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
1697 rtx val = NULL_RTX;
1698 unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
1699 /* Punt on pseudos set multiple times. */
1700 if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache)
1701 && (VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx)
1702 != NULL_RTX))
1703 val = pc_rtx;
1704 else
1705 val = internal_arg_pointer_based_exp (SET_SRC (set), false);
1706 if (val != NULL_RTX)
1708 if (idx
1709 >= VEC_length (rtx, internal_arg_pointer_exp_state.cache))
1710 VEC_safe_grow_cleared (rtx, heap,
1711 internal_arg_pointer_exp_state.cache,
1712 idx + 1);
1713 VEC_replace (rtx, internal_arg_pointer_exp_state.cache,
1714 idx, val);
1717 if (NEXT_INSN (insn) == NULL_RTX)
1718 scan_start = insn;
1719 insn = NEXT_INSN (insn);
1722 internal_arg_pointer_exp_state.scan_start = scan_start;
1725 /* Helper function for internal_arg_pointer_based_exp, called through
1726 for_each_rtx. Return 1 if *LOC is a register based on
1727 crtl->args.internal_arg_pointer. Return -1 if *LOC is not based on it
1728 and the subexpressions need not be examined. Otherwise return 0. */
1730 static int
1731 internal_arg_pointer_based_exp_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1733 if (REG_P (*loc) && internal_arg_pointer_based_exp (*loc, false) != NULL_RTX)
1734 return 1;
1735 if (MEM_P (*loc))
1736 return -1;
1737 return 0;
1740 /* Compute whether RTL is based on crtl->args.internal_arg_pointer. Return
1741 NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
1742 it with fixed offset, or PC if this is with variable or unknown offset.
1743 TOPLEVEL is true if the function is invoked at the topmost level. */
1745 static rtx
1746 internal_arg_pointer_based_exp (rtx rtl, bool toplevel)
1748 if (CONSTANT_P (rtl))
1749 return NULL_RTX;
1751 if (rtl == crtl->args.internal_arg_pointer)
1752 return const0_rtx;
1754 if (REG_P (rtl) && HARD_REGISTER_P (rtl))
1755 return NULL_RTX;
1757 if (GET_CODE (rtl) == PLUS && CONST_INT_P (XEXP (rtl, 1)))
1759 rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
1760 if (val == NULL_RTX || val == pc_rtx)
1761 return val;
1762 return plus_constant (val, INTVAL (XEXP (rtl, 1)));
1765 /* When called at the topmost level, scan pseudo assignments in between the
1766 last scanned instruction in the tail call sequence and the latest insn
1767 in that sequence. */
1768 if (toplevel)
1769 internal_arg_pointer_based_exp_scan ();
1771 if (REG_P (rtl))
1773 unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
1774 if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache))
1775 return VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx);
1777 return NULL_RTX;
1780 if (for_each_rtx (&rtl, internal_arg_pointer_based_exp_1, NULL))
1781 return pc_rtx;
1783 return NULL_RTX;
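/* Illustrative sketch (standalone, not part of calls.c): a toy model of
   the three-way result above, with plain structs in place of rtx.  An
   expression is not based on the arg pointer, based on it at a known
   constant offset, or based on it at an unknown offset (pc_rtx in the
   real code).  Adding a constant keeps a known offset known; the other
   two classes absorb any addend, as in the PLUS/CONST_INT case above.
   All names here are hypothetical.  */
#include <assert.h>

enum basis { NOT_BASED, KNOWN_OFFSET, UNKNOWN_OFFSET };
struct classification { enum basis basis; long offset; };

static struct classification
model_plus_constant (struct classification c, long addend)
{
  if (c.basis == KNOWN_OFFSET)
    c.offset += addend;                /* offsets accumulate */
  return c;                            /* the other classes are sticky */
}

int
main (void)
{
  struct classification ap = { KNOWN_OFFSET, 0 };  /* the arg pointer */
  struct classification unk = { UNKNOWN_OFFSET, 0 };

  assert (model_plus_constant (model_plus_constant (ap, 4), 8).offset == 12);
  assert (model_plus_constant (unk, 8).basis == UNKNOWN_OFFSET);
  return 0;
}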
1786 /* Return true if and only if SIZE storage units (usually bytes)
1787 starting from address ADDR overlap with already clobbered argument
1788 area. This function is used to determine if we should give up a
1789 sibcall. */
1791 static bool
1792 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1794 HOST_WIDE_INT i;
1795 rtx val;
1797 if (sbitmap_empty_p (stored_args_map))
1798 return false;
1799 val = internal_arg_pointer_based_exp (addr, true);
1800 if (val == NULL_RTX)
1801 return false;
1802 else if (val == pc_rtx)
1803 return true;
1804 else
1805 i = INTVAL (val);
1807 #ifdef ARGS_GROW_DOWNWARD
1808 i = -i - size;
1809 #endif
1810 if (size > 0)
1812 unsigned HOST_WIDE_INT k;
1814 for (k = 0; k < size; k++)
1815 if (i + k < stored_args_map->n_bits
1816 && TEST_BIT (stored_args_map, i + k))
1817 return true;
1820 return false;
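/* Illustrative sketch (standalone, not part of calls.c): the overlap test
   above modelled with a plain char array instead of the stored_args_map
   sbitmap, assuming args grow upward (so without the ARGS_GROW_DOWNWARD
   negation).  All names here are hypothetical.  */
#include <assert.h>
#include <string.h>

#define MAP_BYTES 64

static char stored_args[MAP_BYTES];    /* 1 = byte already clobbered */

static int
overlaps_clobbered_area (long offset, unsigned long size)
{
  unsigned long k;

  for (k = 0; k < size; k++)
    if (offset + k < MAP_BYTES && stored_args[offset + k])
      return 1;
  return 0;
}

int
main (void)
{
  memset (stored_args + 8, 1, 4);      /* bytes 8..11 already stored */

  assert (!overlaps_clobbered_area (0, 8));    /* entirely below */
  assert (overlaps_clobbered_area (10, 2));    /* inside the slot */
  assert (!overlaps_clobbered_area (12, 16));  /* entirely above */
  return 0;
}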
1823 /* Do the register loads required for any wholly-register parms or any
1824 parms which are passed both on the stack and in a register. Their
1825 expressions were already evaluated.
1827 Mark all register-parms as living through the call, putting these USE
1828 insns in the CALL_INSN_FUNCTION_USAGE field.
1830 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1831 checking, setting *SIBCALL_FAILURE if appropriate. */
1833 static void
1834 load_register_parameters (struct arg_data *args, int num_actuals,
1835 rtx *call_fusage, int flags, int is_sibcall,
1836 int *sibcall_failure)
1838 int i, j;
1840 for (i = 0; i < num_actuals; i++)
1842 rtx reg = ((flags & ECF_SIBCALL)
1843 ? args[i].tail_call_reg : args[i].reg);
1844 if (reg)
1846 int partial = args[i].partial;
1847 int nregs;
1848 int size = 0;
1849 rtx before_arg = get_last_insn ();
1850 /* Set non-negative if we must move a word at a time, even if
1851 just one word (e.g., partial == 4 && mode == DFmode). Set
1852 to -1 if we just use a normal move insn. This value can be
1853 zero if the argument is a zero size structure. */
1854 nregs = -1;
1855 if (GET_CODE (reg) == PARALLEL)
1857 else if (partial)
1859 gcc_assert (partial % UNITS_PER_WORD == 0);
1860 nregs = partial / UNITS_PER_WORD;
1862 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1864 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1865 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1867 else
1868 size = GET_MODE_SIZE (args[i].mode);
1870 /* Handle calls that pass values in multiple non-contiguous
1871 locations. The Irix 6 ABI has examples of this. */
1873 if (GET_CODE (reg) == PARALLEL)
1874 emit_group_move (reg, args[i].parallel_value);
1876 /* If simple case, just do move. If normal partial, store_one_arg
1877 has already loaded the register for us. In all other cases,
1878 load the register(s) from memory. */
1880 else if (nregs == -1)
1882 emit_move_insn (reg, args[i].value);
1883 #ifdef BLOCK_REG_PADDING
1884 /* Handle the case where we have a value that needs shifting
1885 up to the MSB, e.g. a QImode value when we're padding
1886 upward on a BYTES_BIG_ENDIAN machine. */
1887 if (size < UNITS_PER_WORD
1888 && (args[i].locate.where_pad
1889 == (BYTES_BIG_ENDIAN ? upward : downward)))
1891 rtx x;
1892 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1894 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1895 report the whole reg as used. Strictly speaking, the
1896 call only uses SIZE bytes at the msb end, but it doesn't
1897 seem worth generating rtl to say that. */
1898 reg = gen_rtx_REG (word_mode, REGNO (reg));
1899 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
1900 if (x != reg)
1901 emit_move_insn (reg, x);
1903 #endif
1906 /* If we have pre-computed the values to put in the registers in
1907 the case of non-aligned structures, copy them in now. */
1909 else if (args[i].n_aligned_regs != 0)
1910 for (j = 0; j < args[i].n_aligned_regs; j++)
1911 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1912 args[i].aligned_regs[j]);
1914 else if (partial == 0 || args[i].pass_on_stack)
1916 rtx mem = validize_mem (args[i].value);
1918 /* Check for overlap with already clobbered argument area,
1919 provided that it has non-zero size. */
1920 if (is_sibcall
1921 && (size == 0
1922 || mem_overlaps_already_clobbered_arg_p
1923 (XEXP (args[i].value, 0), size)))
1924 *sibcall_failure = 1;
1926 /* Handle a BLKmode that needs shifting. */
1927 if (nregs == 1 && size < UNITS_PER_WORD
1928 #ifdef BLOCK_REG_PADDING
1929 && args[i].locate.where_pad == downward
1930 #else
1931 && BYTES_BIG_ENDIAN
1932 #endif
1935 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1936 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1937 rtx x = gen_reg_rtx (word_mode);
1938 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1939 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1940 : LSHIFT_EXPR;
1942 emit_move_insn (x, tem);
1943 x = expand_shift (dir, word_mode, x, shift, ri, 1);
1944 if (x != ri)
1945 emit_move_insn (ri, x);
1947 else
1948 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1951 /* When a parameter is a block, and perhaps in other cases, it is
1952 possible that it did a load from an argument slot that was
1953 already clobbered. */
1954 if (is_sibcall
1955 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1956 *sibcall_failure = 1;
1958 /* Handle calls that pass values in multiple non-contiguous
1959 locations. The Irix 6 ABI has examples of this. */
1960 if (GET_CODE (reg) == PARALLEL)
1961 use_group_regs (call_fusage, reg);
1962 else if (nregs == -1)
1963 use_reg_mode (call_fusage, reg,
1964 TYPE_MODE (TREE_TYPE (args[i].tree_value)));
1965 else if (nregs > 0)
1966 use_regs (call_fusage, REGNO (reg), nregs);
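/* Illustrative sketch (standalone, not part of calls.c): the shift count
   used by the BLOCK_REG_PADDING paths above when a small value must be
   moved to the most significant end of a word.  Assumes a hypothetical
   32-bit target with 8-bit units.  */
#include <assert.h>

#define WORD_BYTES 4   /* stand-in for UNITS_PER_WORD */
#define UNIT_BITS  8   /* stand-in for BITS_PER_UNIT */

static int
msb_padding_shift (int size_in_bytes)
{
  return (WORD_BYTES - size_in_bytes) * UNIT_BITS;
}

int
main (void)
{
  assert (msb_padding_shift (1) == 24);  /* QImode-sized value */
  assert (msb_padding_shift (2) == 16);  /* HImode-sized value */
  assert (msb_padding_shift (4) == 0);   /* full word, nothing to do */
  return 0;
}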
1971 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1972 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1973 bytes, then we would need to push some additional bytes to pad the
1974 arguments. So, we compute an adjustment to the stack pointer for an
1975 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1976 bytes. Then, when the arguments are pushed the stack will be perfectly
1977 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1978 be popped after the call. Returns the adjustment. */
1980 static int
1981 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1982 struct args_size *args_size,
1983 unsigned int preferred_unit_stack_boundary)
1985 /* The number of bytes to pop so that the stack will be
1986 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1987 HOST_WIDE_INT adjustment;
1988 /* The alignment of the stack after the arguments are pushed, if we
1989 just pushed the arguments without adjusting the stack here. */
1990 unsigned HOST_WIDE_INT unadjusted_alignment;
1992 unadjusted_alignment
1993 = ((stack_pointer_delta + unadjusted_args_size)
1994 % preferred_unit_stack_boundary);
1996 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1997 as possible -- leaving just enough left to cancel out the
1998 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1999 PENDING_STACK_ADJUST is non-negative, and congruent to
2000 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
2002 /* Begin by trying to pop all the bytes. */
2003 unadjusted_alignment
2004 = (unadjusted_alignment
2005 - (pending_stack_adjust % preferred_unit_stack_boundary));
2006 adjustment = pending_stack_adjust;
2007 /* Push enough additional bytes that the stack will be aligned
2008 after the arguments are pushed. */
2009 if (preferred_unit_stack_boundary > 1)
2011 if (unadjusted_alignment > 0)
2012 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2013 else
2014 adjustment += unadjusted_alignment;
2017 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2018 bytes after the call. The right number is the entire
2019 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2020 by the arguments in the first place. */
2021 args_size->constant
2022 = pending_stack_adjust - adjustment + unadjusted_args_size;
2024 return adjustment;
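/* Illustrative sketch (standalone, not part of calls.c): the arithmetic
   above with concrete numbers.  With a 16-byte boundary, 64 pending bytes
   to pop and 20 bytes of arguments about to be pushed, we pop only 52
   bytes now; the 12 bytes of slack left plus the 20 argument bytes make
   32, a multiple of 16, so the stack is aligned once the arguments are
   pushed.  All names here are hypothetical.  */
#include <assert.h>

static int
model_combine_adjustment (int delta, int pending, int args, int boundary,
                          int *popped_after_call)
{
  int misalign = (delta + args) % boundary;
  int adjustment = pending;

  misalign -= pending % boundary;
  if (boundary > 1)
    {
      if (misalign > 0)
        adjustment -= boundary - misalign;
      else
        adjustment += misalign;
    }
  *popped_after_call = pending - adjustment + args;
  return adjustment;
}

int
main (void)
{
  int popped;
  int adj = model_combine_adjustment (0, 64, 20, 16, &popped);

  assert (adj == 52);
  assert (popped == 32);
  /* Slack left after the pop, plus the pushed arguments, is aligned.  */
  assert (((64 - adj) + 20) % 16 == 0);
  return 0;
}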
2027 /* Scan expression X to see whether it dereferences any argument slots
2028 we have already clobbered with tail call arguments (as noted in the
2029 stored_args_map bitmap).
2030 Return nonzero if X dereferences such an argument slot,
2031 zero otherwise. */
2033 static int
2034 check_sibcall_argument_overlap_1 (rtx x)
2036 RTX_CODE code;
2037 int i, j;
2038 const char *fmt;
2040 if (x == NULL_RTX)
2041 return 0;
2043 code = GET_CODE (x);
2045 /* We need not check the operands of the CALL expression itself. */
2046 if (code == CALL)
2047 return 0;
2049 if (code == MEM)
2050 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
2051 GET_MODE_SIZE (GET_MODE (x)));
2053 /* Scan all subexpressions. */
2054 fmt = GET_RTX_FORMAT (code);
2055 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2057 if (*fmt == 'e')
2059 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2060 return 1;
2062 else if (*fmt == 'E')
2064 for (j = 0; j < XVECLEN (x, i); j++)
2065 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2066 return 1;
2069 return 0;
2072 /* Scan the sequence after INSN to see whether it dereferences any
2073 argument slots we have already clobbered with tail call arguments
2074 (as noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is
2075 nonzero, add the stack slots for ARG to the stored_args_map bitmap
2076 afterwards (when ARG is a register, MARK_STORED_ARGS_MAP should be 0).
2077 Return nonzero if the sequence after INSN dereferences such slots. */
2079 static int
2080 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
2082 int low, high;
2084 if (insn == NULL_RTX)
2085 insn = get_insns ();
2086 else
2087 insn = NEXT_INSN (insn);
2089 for (; insn; insn = NEXT_INSN (insn))
2090 if (INSN_P (insn)
2091 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2092 break;
2094 if (mark_stored_args_map)
2096 #ifdef ARGS_GROW_DOWNWARD
2097 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2098 #else
2099 low = arg->locate.slot_offset.constant;
2100 #endif
2102 for (high = low + arg->locate.size.constant; low < high; low++)
2103 SET_BIT (stored_args_map, low);
2105 return insn != NULL_RTX;
2108 /* Given that a function returns a value of mode MODE at the most
2109 significant end of hard register VALUE, shift VALUE left or right
2110 as specified by LEFT_P. Return true if some action was needed. */
2112 bool
2113 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
2115 HOST_WIDE_INT shift;
2117 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
2118 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
2119 if (shift == 0)
2120 return false;
2122 /* Use ashr rather than lshr for right shifts. This is for the benefit
2123 of the MIPS port, which requires SImode values to be sign-extended
2124 when stored in 64-bit registers. */
2125 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
2126 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
2127 gcc_unreachable ();
2128 return true;
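/* Illustrative sketch (standalone, not part of calls.c): why an
   arithmetic right shift is used above.  On a typical two's-complement
   host where ">>" on a signed type is arithmetic, shifting a value back
   down from the MSB end of a wider register leaves it sign-extended, as
   e.g. the MIPS port requires for SImode values in 64-bit registers.
   Hypothetical standalone code.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  /* A negative 32-bit value parked at the MSB end of a 64-bit register:
     shift count = 64 - 32 = 32, as computed above.  */
  int64_t reg = (int64_t) ((uint64_t) -5 << 32);

  assert ((reg >> 32) == -5);   /* arithmetic shift preserves the sign */
  return 0;
}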
2131 /* If X is a likely-spilled register value, copy it to a pseudo
2132 register and return that register. Return X otherwise. */
2134 static rtx
2135 avoid_likely_spilled_reg (rtx x)
2137 rtx new_rtx;
2139 if (REG_P (x)
2140 && HARD_REGISTER_P (x)
2141 && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
2143 /* Make sure that we generate a REG rather than a CONCAT.
2144 Moves into CONCATs can need nontrivial instructions,
2145 and the whole point of this function is to avoid
2146 using the hard register directly in such a situation. */
2147 generating_concat_p = 0;
2148 new_rtx = gen_reg_rtx (GET_MODE (x));
2149 generating_concat_p = 1;
2150 emit_move_insn (new_rtx, x);
2151 return new_rtx;
2153 return x;
2156 /* Generate all the code for a CALL_EXPR exp
2157 and return an rtx for its value.
2158 Store the value in TARGET (specified as an rtx) if convenient.
2159 If the value is stored in TARGET then TARGET is returned.
2160 If IGNORE is nonzero, then we ignore the value of the function call. */
2162 rtx
2163 expand_call (tree exp, rtx target, int ignore)
2165 /* Nonzero if we are currently expanding a call. */
2166 static int currently_expanding_call = 0;
2168 /* RTX for the function to be called. */
2169 rtx funexp;
2170 /* Sequence of insns to perform a normal "call". */
2171 rtx normal_call_insns = NULL_RTX;
2172 /* Sequence of insns to perform a tail "call". */
2173 rtx tail_call_insns = NULL_RTX;
2174 /* Data type of the function. */
2175 tree funtype;
2176 tree type_arg_types;
2177 tree rettype;
2178 /* Declaration of the function being called,
2179 or 0 if the function is computed (not known by name). */
2180 tree fndecl = 0;
2181 /* The type of the function being called. */
2182 tree fntype;
2183 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
2184 int pass;
2186 /* Register in which non-BLKmode value will be returned,
2187 or 0 if no value or if value is BLKmode. */
2188 rtx valreg;
2189 /* Address where we should return a BLKmode value;
2190 0 if value not BLKmode. */
2191 rtx structure_value_addr = 0;
2192 /* Nonzero if that address is being passed by treating it as
2193 an extra, implicit first parameter. Otherwise,
2194 it is passed by being copied directly into struct_value_rtx. */
2195 int structure_value_addr_parm = 0;
2196 /* Holds the value of implicit argument for the struct value. */
2197 tree structure_value_addr_value = NULL_TREE;
2198 /* Size of aggregate value wanted, or zero if none wanted
2199 or if we are using the non-reentrant PCC calling convention
2200 or expecting the value in registers. */
2201 HOST_WIDE_INT struct_value_size = 0;
2202 /* Nonzero if called function returns an aggregate in memory PCC style,
2203 by returning the address of where to find it. */
2204 int pcc_struct_value = 0;
2205 rtx struct_value = 0;
2207 /* Number of actual parameters in this call, including struct value addr. */
2208 int num_actuals;
2209 /* Number of named args. Args after this are anonymous ones
2210 and they must all go on the stack. */
2211 int n_named_args;
2212 /* Number of complex actual arguments that need to be split. */
2213 int num_complex_actuals = 0;
2215 /* Vector of information about each argument.
2216 Arguments are numbered in the order they will be pushed,
2217 not the order they are written. */
2218 struct arg_data *args;
2220 /* Total size in bytes of all the stack-parms scanned so far. */
2221 struct args_size args_size;
2222 struct args_size adjusted_args_size;
2223 /* Size of arguments before any adjustments (such as rounding). */
2224 int unadjusted_args_size;
2225 /* Data on reg parms scanned so far. */
2226 CUMULATIVE_ARGS args_so_far_v;
2227 cumulative_args_t args_so_far;
2228 /* Nonzero if a reg parm has been scanned. */
2229 int reg_parm_seen;
2230 /* Nonzero if this is an indirect function call. */
2232 /* Nonzero if we must avoid push-insns in the args for this call.
2233 If stack space is allocated for register parameters, but not by the
2234 caller, then it is preallocated in the fixed part of the stack frame.
2235 So the entire argument block must then be preallocated (i.e., we
2236 ignore PUSH_ROUNDING in that case). */
2238 int must_preallocate = !PUSH_ARGS;
2240 /* Size of the stack reserved for parameter registers. */
2241 int reg_parm_stack_space = 0;
2243 /* Address of space preallocated for stack parms
2244 (on machines that lack push insns), or 0 if space not preallocated. */
2245 rtx argblock = 0;
2247 /* Mask of ECF_ flags. */
2248 int flags = 0;
2249 #ifdef REG_PARM_STACK_SPACE
2250 /* Define the boundary of the register parm stack space that needs to be
2251 saved, if any. */
2252 int low_to_save, high_to_save;
2253 rtx save_area = 0; /* Place that it is saved */
2254 #endif
2256 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2257 char *initial_stack_usage_map = stack_usage_map;
2258 char *stack_usage_map_buf = NULL;
2260 int old_stack_allocated;
2262 /* State variables to track stack modifications. */
2263 rtx old_stack_level = 0;
2264 int old_stack_arg_under_construction = 0;
2265 int old_pending_adj = 0;
2266 int old_inhibit_defer_pop = inhibit_defer_pop;
2268 /* Some stack pointer alterations we make are performed via
2269 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2270 which we then also need to save/restore along the way. */
2271 int old_stack_pointer_delta = 0;
2273 rtx call_fusage;
2274 tree addr = CALL_EXPR_FN (exp);
2275 int i;
2276 /* The alignment of the stack, in bits. */
2277 unsigned HOST_WIDE_INT preferred_stack_boundary;
2278 /* The alignment of the stack, in bytes. */
2279 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
2280 /* The static chain value to use for this call. */
2281 rtx static_chain_value;
2282 /* See if this is "nothrow" function call. */
2283 if (TREE_NOTHROW (exp))
2284 flags |= ECF_NOTHROW;
2286 /* See if we can find a DECL-node for the actual function, and get the
2287 function attributes (flags) from the function decl or type node. */
2288 fndecl = get_callee_fndecl (exp);
2289 if (fndecl)
2291 fntype = TREE_TYPE (fndecl);
2292 flags |= flags_from_decl_or_type (fndecl);
2294 else
2296 fntype = TREE_TYPE (TREE_TYPE (addr));
2297 flags |= flags_from_decl_or_type (fntype);
2299 rettype = TREE_TYPE (exp);
2301 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
2303 /* Warn if this value is an aggregate type,
2304 regardless of which calling convention we are using for it. */
2305 if (AGGREGATE_TYPE_P (rettype))
2306 warning (OPT_Waggregate_return, "function call has aggregate value");
2308 /* If the result of a non-looping pure or const function call is
2309 ignored (or void), and none of its arguments are volatile, we can
2310 avoid expanding the call and just evaluate the arguments for
2311 side-effects. */
2312 if ((flags & (ECF_CONST | ECF_PURE))
2313 && (!(flags & ECF_LOOPING_CONST_OR_PURE))
2314 && (ignore || target == const0_rtx
2315 || TYPE_MODE (rettype) == VOIDmode))
2317 bool volatilep = false;
2318 tree arg;
2319 call_expr_arg_iterator iter;
2321 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2322 if (TREE_THIS_VOLATILE (arg))
2324 volatilep = true;
2325 break;
2328 if (! volatilep)
2330 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2331 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
2332 return const0_rtx;
2336 #ifdef REG_PARM_STACK_SPACE
2337 reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
2338 #endif
2340 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
2341 && reg_parm_stack_space > 0 && PUSH_ARGS)
2342 must_preallocate = 1;
2344 /* Set up a place to return a structure. */
2346 /* Cater to broken compilers. */
2347 if (aggregate_value_p (exp, fntype))
2349 /* This call returns a big structure. */
2350 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2352 #ifdef PCC_STATIC_STRUCT_RETURN
2354 pcc_struct_value = 1;
2356 #else /* not PCC_STATIC_STRUCT_RETURN */
2358 struct_value_size = int_size_in_bytes (rettype);
2360 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2361 structure_value_addr = XEXP (target, 0);
2362 else
2364 /* For variable-sized objects, we must be called with a target
2365 specified. If we were to allocate space on the stack here,
2366 we would have no way of knowing when to free it. */
2367 rtx d = assign_temp (rettype, 0, 1, 1);
2369 mark_temp_addr_taken (d);
2370 structure_value_addr = XEXP (d, 0);
2371 target = 0;
2374 #endif /* not PCC_STATIC_STRUCT_RETURN */
2377 /* Figure out the amount to which the stack should be aligned. */
2378 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2379 if (fndecl)
2381 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2382 /* Without automatic stack alignment, we can't increase preferred
2383 stack boundary. With automatic stack alignment, it is
2384 unnecessary since unless we can guarantee that all callers will
2385 align the outgoing stack properly, callee has to align its
2386 stack anyway. */
2387 if (i
2388 && i->preferred_incoming_stack_boundary
2389 && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
2390 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2393 /* Operand 0 is a pointer-to-function; get the type of the function. */
2394 funtype = TREE_TYPE (addr);
2395 gcc_assert (POINTER_TYPE_P (funtype));
2396 funtype = TREE_TYPE (funtype);
2398 /* Count whether there are actual complex arguments that need to be split
2399 into their real and imaginary parts. Munge the type_arg_types
2400 appropriately here as well. */
2401 if (targetm.calls.split_complex_arg)
2403 call_expr_arg_iterator iter;
2404 tree arg;
2405 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
2407 tree type = TREE_TYPE (arg);
2408 if (type && TREE_CODE (type) == COMPLEX_TYPE
2409 && targetm.calls.split_complex_arg (type))
2410 num_complex_actuals++;
2412 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2414 else
2415 type_arg_types = TYPE_ARG_TYPES (funtype);
2417 if (flags & ECF_MAY_BE_ALLOCA)
2418 cfun->calls_alloca = 1;
2420 /* If struct_value_rtx is 0, it means pass the address
2421 as if it were an extra parameter. Put the argument expression
2422 in structure_value_addr_value. */
2423 if (structure_value_addr && struct_value == 0)
2425 /* If structure_value_addr is a REG other than
2426 virtual_outgoing_args_rtx, we can always use it. If it
2427 is not a REG, we must always copy it into a register.
2428 If it is virtual_outgoing_args_rtx, we must copy it to another
2429 register in some cases. */
2430 rtx temp = (!REG_P (structure_value_addr)
2431 || (ACCUMULATE_OUTGOING_ARGS
2432 && stack_arg_under_construction
2433 && structure_value_addr == virtual_outgoing_args_rtx)
2434 ? copy_addr_to_reg (convert_memory_address
2435 (Pmode, structure_value_addr))
2436 : structure_value_addr);
2438 structure_value_addr_value =
2439 make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
2440 structure_value_addr_parm = 1;
2443 /* Count the arguments and set NUM_ACTUALS. */
2444 num_actuals =
2445 call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
2447 /* Compute number of named args.
2448 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2450 if (type_arg_types != 0)
2451 n_named_args
2452 = (list_length (type_arg_types)
2453 /* Count the struct value address, if it is passed as a parm. */
2454 + structure_value_addr_parm);
2455 else
2456 /* If we know nothing, treat all args as named. */
2457 n_named_args = num_actuals;
2459 /* Start updating where the next arg would go.
2461 On some machines (such as the PA) indirect calls have a different
2462 calling convention than normal calls. The fourth argument in
2463 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2464 or not. */
2465 INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
2466 args_so_far = pack_cumulative_args (&args_so_far_v);
2468 /* Now possibly adjust the number of named args.
2469 Normally, don't include the last named arg if anonymous args follow.
2470 We do include the last named arg if
2471 targetm.calls.strict_argument_naming() returns nonzero.
2472 (If no anonymous args follow, the result of list_length is actually
2473 one too large. This is harmless.)
2475 If targetm.calls.pretend_outgoing_varargs_named() returns
2476 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2477 this machine will be able to place unnamed args that were passed
2478 in registers into the stack. So treat all args as named. This
2479 allows the insns emitted for a specific argument list to be
2480 independent of the function declaration.
2482 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2483 we do not have any reliable way to pass unnamed args in
2484 registers, so we must force them into memory. */
2486 if (type_arg_types != 0
2487 && targetm.calls.strict_argument_naming (args_so_far))
2489 else if (type_arg_types != 0
2490 && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
2491 /* Don't include the last named arg. */
2492 --n_named_args;
2493 else
2494 /* Treat all args as named. */
2495 n_named_args = num_actuals;
2497 /* Make a vector to hold all the information about each arg. */
2498 args = XALLOCAVEC (struct arg_data, num_actuals);
2499 memset (args, 0, num_actuals * sizeof (struct arg_data));
2501 /* Build up entries in the ARGS array, compute the size of the
2502 arguments into ARGS_SIZE, etc. */
2503 initialize_argument_information (num_actuals, args, &args_size,
2504 n_named_args, exp,
2505 structure_value_addr_value, fndecl, fntype,
2506 args_so_far, reg_parm_stack_space,
2507 &old_stack_level, &old_pending_adj,
2508 &must_preallocate, &flags,
2509 &try_tail_call, CALL_FROM_THUNK_P (exp));
2511 if (args_size.var)
2512 must_preallocate = 1;
2514 /* Now make final decision about preallocating stack space. */
2515 must_preallocate = finalize_must_preallocate (must_preallocate,
2516 num_actuals, args,
2517 &args_size);
2519 /* If the structure value address will reference the stack pointer, we
2520 must stabilize it. We don't need to do this if we know that we are
2521 not going to adjust the stack pointer in processing this call. */
2523 if (structure_value_addr
2524 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2525 || reg_mentioned_p (virtual_outgoing_args_rtx,
2526 structure_value_addr))
2527 && (args_size.var
2528 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2529 structure_value_addr = copy_to_reg (structure_value_addr);
2531 /* Tail calls can make things harder to debug, and we've traditionally
2532 pushed these optimizations into -O2. Don't try if we're already
2533 expanding a call, as that means we're an argument. Don't try if
2534 there are cleanups, as we know there's code to follow the call. */
2536 if (currently_expanding_call++ != 0
2537 || !flag_optimize_sibling_calls
2538 || args_size.var
2539 || dbg_cnt (tail_call) == false)
2540 try_tail_call = 0;
2542 /* Other reasons tail call optimization must fail. */
2543 if (
2544 #ifdef HAVE_sibcall_epilogue
2545 !HAVE_sibcall_epilogue
2546 #else
2547 1
2548 #endif
2549 || !try_tail_call
2550 /* Doing sibling call optimization needs some work, since
2551 structure_value_addr can be allocated on the stack.
2552 It does not seem worth the effort since few optimizable
2553 sibling calls will return a structure. */
2554 || structure_value_addr != NULL_RTX
2555 #ifdef REG_PARM_STACK_SPACE
2556 /* If outgoing reg parm stack space changes, we cannot do a sibcall. */
2557 || (OUTGOING_REG_PARM_STACK_SPACE (funtype)
2558 != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
2559 || (reg_parm_stack_space != REG_PARM_STACK_SPACE (fndecl))
2560 #endif
2561 /* Check whether the target is able to optimize the call
2562 into a sibcall. */
2563 || !targetm.function_ok_for_sibcall (fndecl, exp)
2564 /* Functions that do not return exactly once may not be sibcall
2565 optimized. */
2566 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2567 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2568 /* If the called function is nested in the current one, it might access
2569 some of the caller's arguments, but could clobber them beforehand if
2570 the argument areas are shared. */
2571 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2572 /* If this function requires more stack slots than the current
2573 function, we cannot change it into a sibling call.
2574 crtl->args.pretend_args_size is not part of the
2575 stack allocated by our caller. */
2576 || args_size.constant > (crtl->args.size
2577 - crtl->args.pretend_args_size)
2578 /* If the callee pops its own arguments, then it must pop exactly
2579 the same number of arguments as the current function. */
2580 || (targetm.calls.return_pops_args (fndecl, funtype, args_size.constant)
2581 != targetm.calls.return_pops_args (current_function_decl,
2582 TREE_TYPE (current_function_decl),
2583 crtl->args.size))
2584 || !lang_hooks.decls.ok_for_sibcall (fndecl))
2585 try_tail_call = 0;
2587 /* Check if caller and callee disagree in promotion of function
2588 return value. */
2589 if (try_tail_call)
2591 enum machine_mode caller_mode, caller_promoted_mode;
2592 enum machine_mode callee_mode, callee_promoted_mode;
2593 int caller_unsignedp, callee_unsignedp;
2594 tree caller_res = DECL_RESULT (current_function_decl);
2596 caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
2597 caller_mode = DECL_MODE (caller_res);
2598 callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
2599 callee_mode = TYPE_MODE (TREE_TYPE (funtype));
2600 caller_promoted_mode
2601 = promote_function_mode (TREE_TYPE (caller_res), caller_mode,
2602 &caller_unsignedp,
2603 TREE_TYPE (current_function_decl), 1);
2604 callee_promoted_mode
2605 = promote_function_mode (TREE_TYPE (funtype), callee_mode,
2606 &callee_unsignedp,
2607 funtype, 1);
2608 if (caller_mode != VOIDmode
2609 && (caller_promoted_mode != callee_promoted_mode
2610 || ((caller_mode != caller_promoted_mode
2611 || callee_mode != callee_promoted_mode)
2612 && (caller_unsignedp != callee_unsignedp
2613 || GET_MODE_BITSIZE (caller_mode)
2614 < GET_MODE_BITSIZE (callee_mode)))))
2615 try_tail_call = 0;
2618 /* Ensure current function's preferred stack boundary is at least
2619 what we need. Stack alignment may also increase preferred stack
2620 boundary. */
2621 if (crtl->preferred_stack_boundary < preferred_stack_boundary)
2622 crtl->preferred_stack_boundary = preferred_stack_boundary;
2623 else
2624 preferred_stack_boundary = crtl->preferred_stack_boundary;
2626 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2628 /* We want to make two insn chains; one for a sibling call, the other
2629 for a normal call. We will select one of the two chains after
2630 initial RTL generation is complete. */
2631 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2633 int sibcall_failure = 0;
2634 /* We want to emit any pending stack adjustments before the tail
2635 recursion "call". That way we know any adjustment after the tail
2636 recursion call can be ignored if we indeed use the tail
2637 call expansion. */
2638 int save_pending_stack_adjust = 0;
2639 int save_stack_pointer_delta = 0;
2640 rtx insns;
2641 rtx before_call, next_arg_reg, after_args;
2643 if (pass == 0)
2645 /* State variables we need to save and restore between
2646 iterations. */
2647 save_pending_stack_adjust = pending_stack_adjust;
2648 save_stack_pointer_delta = stack_pointer_delta;
2650 if (pass)
2651 flags &= ~ECF_SIBCALL;
2652 else
2653 flags |= ECF_SIBCALL;
2655 /* Other state variables that we must reinitialize each time
2656 through the loop (that are not initialized by the loop itself). */
2657 argblock = 0;
2658 call_fusage = 0;
2660 /* Start a new sequence for the normal call case.
2662 From this point on, if the sibling call fails, we want to set
2663 sibcall_failure instead of continuing the loop. */
2664 start_sequence ();
2666 /* Don't let pending stack adjusts add up to too much.
2667 Also, do all pending adjustments now if there is any chance
2668 this might be a call to alloca or if we are expanding a sibling
2669 call sequence.
2670 Also do the adjustments before a throwing call, otherwise
2671 exception handling can fail; PR 19225. */
2672 if (pending_stack_adjust >= 32
2673 || (pending_stack_adjust > 0
2674 && (flags & ECF_MAY_BE_ALLOCA))
2675 || (pending_stack_adjust > 0
2676 && flag_exceptions && !(flags & ECF_NOTHROW))
2677 || pass == 0)
2678 do_pending_stack_adjust ();
2680 /* Precompute any arguments as needed. */
2681 if (pass)
2682 precompute_arguments (num_actuals, args);
2684 /* Now we are about to start emitting insns that can be deleted
2685 if a libcall is deleted. */
2686 if (pass && (flags & ECF_MALLOC))
2687 start_sequence ();
2689 if (pass == 0 && crtl->stack_protect_guard)
2690 stack_protect_epilogue ();
2692 adjusted_args_size = args_size;
2693 /* Compute the actual size of the argument block required. The variable
2694 and constant sizes must be combined, the size may have to be rounded,
2695 and there may be a minimum required size. When generating a sibcall
2696 pattern, do not round up, since we'll be re-using whatever space our
2697 caller provided. */
2698 unadjusted_args_size
2699 = compute_argument_block_size (reg_parm_stack_space,
2700 &adjusted_args_size,
2701 fndecl, fntype,
2702 (pass == 0 ? 0
2703 : preferred_stack_boundary));
2705 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2707 /* The argument block when performing a sibling call is the
2708 incoming argument block. */
2709 if (pass == 0)
2711 argblock = crtl->args.internal_arg_pointer;
2712 argblock
2713 #ifdef STACK_GROWS_DOWNWARD
2714 = plus_constant (argblock, crtl->args.pretend_args_size);
2715 #else
2716 = plus_constant (argblock, -crtl->args.pretend_args_size);
2717 #endif
2718 stored_args_map = sbitmap_alloc (args_size.constant);
2719 sbitmap_zero (stored_args_map);
2722 /* If we have no actual push instructions, or shouldn't use them,
2723 make space for all args right now. */
2724 else if (adjusted_args_size.var != 0)
2726 if (old_stack_level == 0)
2728 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2729 old_stack_pointer_delta = stack_pointer_delta;
2730 old_pending_adj = pending_stack_adjust;
2731 pending_stack_adjust = 0;
2732 /* stack_arg_under_construction says whether a stack arg is
2733 being constructed at the old stack level. Pushing the stack
2734 gets a clean outgoing argument block. */
2735 old_stack_arg_under_construction = stack_arg_under_construction;
2736 stack_arg_under_construction = 0;
2738 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2739 if (flag_stack_usage_info)
2740 current_function_has_unbounded_dynamic_stack_size = 1;
2742 else
2744 /* Note that we must go through the motions of allocating an argument
2745 block even if the size is zero because we may be storing args
2746 in the area reserved for register arguments, which may be part of
2747 the stack frame. */
2749 int needed = adjusted_args_size.constant;
2751 /* Store the maximum argument space used. It will be pushed by
2752 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2753 checking). */
2755 if (needed > crtl->outgoing_args_size)
2756 crtl->outgoing_args_size = needed;
2758 if (must_preallocate)
2760 if (ACCUMULATE_OUTGOING_ARGS)
2762 /* Since the stack pointer will never be pushed, it is
2763 possible for the evaluation of a parm to clobber
2764 something we have already written to the stack.
2765 Since most function calls on RISC machines do not use
2766 the stack, this is uncommon, but must work correctly.
2768 Therefore, we save any area of the stack that was already
2769 written and that we are using. Here we set up to do this
2770 by making a new stack usage map from the old one. The
2771 actual save will be done by store_one_arg.
2773 Another approach might be to try to reorder the argument
2774 evaluations to avoid this conflicting stack usage. */
2776 /* Since we will be writing into the entire argument area,
2777 the map must be allocated for its entire size, not just
2778 the part that is the responsibility of the caller. */
2779 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2780 needed += reg_parm_stack_space;
2782 #ifdef ARGS_GROW_DOWNWARD
2783 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2784 needed + 1);
2785 #else
2786 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2787 needed);
2788 #endif
2789 free (stack_usage_map_buf);
2790 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2791 stack_usage_map = stack_usage_map_buf;
2793 if (initial_highest_arg_in_use)
2794 memcpy (stack_usage_map, initial_stack_usage_map,
2795 initial_highest_arg_in_use);
2797 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2798 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2799 (highest_outgoing_arg_in_use
2800 - initial_highest_arg_in_use));
2801 needed = 0;
2803 /* The address of the outgoing argument list must not be
2804 copied to a register here, because argblock would be left
2805 pointing to the wrong place after the call to
2806 allocate_dynamic_stack_space below. */
2808 argblock = virtual_outgoing_args_rtx;
2810 else
2812 if (inhibit_defer_pop == 0)
2814 /* Try to reuse some or all of the pending_stack_adjust
2815 to get this space. */
2816 needed
2817 = (combine_pending_stack_adjustment_and_call
2818 (unadjusted_args_size,
2819 &adjusted_args_size,
2820 preferred_unit_stack_boundary));
2822 /* combine_pending_stack_adjustment_and_call computes
2823 an adjustment before the arguments are allocated.
2824 Account for them and see whether or not the stack
2825 needs to go up or down. */
2826 needed = unadjusted_args_size - needed;
2828 if (needed < 0)
2830 /* We're releasing stack space. */
2831 /* ??? We can avoid any adjustment at all if we're
2832 already aligned. FIXME. */
2833 pending_stack_adjust = -needed;
2834 do_pending_stack_adjust ();
2835 needed = 0;
2837 else
2838 /* We need to allocate space. We'll do that in
2839 push_block below. */
2840 pending_stack_adjust = 0;
2843 /* Special case this because overhead of `push_block' in
2844 this case is non-trivial. */
2845 if (needed == 0)
2846 argblock = virtual_outgoing_args_rtx;
2847 else
2849 argblock = push_block (GEN_INT (needed), 0, 0);
2850 #ifdef ARGS_GROW_DOWNWARD
2851 argblock = plus_constant (argblock, needed);
2852 #endif
2855 /* We only really need to call `copy_to_reg' in the case
2856 where push insns are going to be used to pass ARGBLOCK
2857 to a function call in ARGS. In that case, the stack
2858 pointer changes value from the allocation point to the
2859 call point, and hence the value of
2860 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2861 as well always do it. */
2862 argblock = copy_to_reg (argblock);
2867 if (ACCUMULATE_OUTGOING_ARGS)
2869 /* The save/restore code in store_one_arg handles all
2870 cases except one: a constructor call (including a C
2871 function returning a BLKmode struct) to initialize
2872 an argument. */
2873 if (stack_arg_under_construction)
2875 rtx push_size
2876 = GEN_INT (adjusted_args_size.constant
2877 + (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype
2878 : TREE_TYPE (fndecl))) ? 0
2879 : reg_parm_stack_space));
2880 if (old_stack_level == 0)
2882 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2883 old_stack_pointer_delta = stack_pointer_delta;
2884 old_pending_adj = pending_stack_adjust;
2885 pending_stack_adjust = 0;
2886 /* stack_arg_under_construction says whether a stack
2887 arg is being constructed at the old stack level.
2888 Pushing the stack gets a clean outgoing argument
2889 block. */
2890 old_stack_arg_under_construction
2891 = stack_arg_under_construction;
2892 stack_arg_under_construction = 0;
2893 /* Make a new map for the new argument list. */
2894 free (stack_usage_map_buf);
2895 stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
2896 stack_usage_map = stack_usage_map_buf;
2897 highest_outgoing_arg_in_use = 0;
2899 /* We can pass TRUE as the 4th argument because we just
2900 saved the stack pointer and will restore it right after
2901 the call. */
2902 allocate_dynamic_stack_space (push_size, 0,
2903 BIGGEST_ALIGNMENT, true);
2906 /* If argument evaluation might modify the stack pointer,
2907 copy the address of the argument list to a register. */
2908 for (i = 0; i < num_actuals; i++)
2909 if (args[i].pass_on_stack)
2911 argblock = copy_addr_to_reg (argblock);
2912 break;
2916 compute_argument_addresses (args, argblock, num_actuals);
2918 /* If we push args individually in reverse order, perform stack alignment
2919 before the first push (the last arg). */
2920 if (PUSH_ARGS_REVERSED && argblock == 0
2921 && adjusted_args_size.constant != unadjusted_args_size)
2923 /* When the stack adjustment is pending, we get better code
2924 by combining the adjustments. */
2925 if (pending_stack_adjust
2926 && ! inhibit_defer_pop)
2928 pending_stack_adjust
2929 = (combine_pending_stack_adjustment_and_call
2930 (unadjusted_args_size,
2931 &adjusted_args_size,
2932 preferred_unit_stack_boundary));
2933 do_pending_stack_adjust ();
2935 else if (argblock == 0)
2936 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2937 - unadjusted_args_size));
2939 /* Now that the stack is properly aligned, pops can't safely
2940 be deferred during the evaluation of the arguments. */
2941 NO_DEFER_POP;
2943 /* Record the maximum pushed stack space size. We need to delay
2944 doing it this far to take into account the optimization done
2945 by combine_pending_stack_adjustment_and_call. */
2946 if (flag_stack_usage_info
2947 && !ACCUMULATE_OUTGOING_ARGS
2948 && pass
2949 && adjusted_args_size.var == 0)
2951 int pushed = adjusted_args_size.constant + pending_stack_adjust;
2952 if (pushed > current_function_pushed_stack_size)
2953 current_function_pushed_stack_size = pushed;
2956 funexp = rtx_for_function_call (fndecl, addr);
2958 /* Figure out the register where the value, if any, will come back. */
2959 valreg = 0;
2960 if (TYPE_MODE (rettype) != VOIDmode
2961 && ! structure_value_addr)
2963 if (pcc_struct_value)
2964 valreg = hard_function_value (build_pointer_type (rettype),
2965 fndecl, NULL, (pass == 0));
2966 else
2967 valreg = hard_function_value (rettype, fndecl, fntype,
2968 (pass == 0));
2970 /* If VALREG is a PARALLEL whose first member has a zero
2971 offset, use that. This is for targets such as m68k that
2972 return the same value in multiple places. */
2973 if (GET_CODE (valreg) == PARALLEL)
2975 rtx elem = XVECEXP (valreg, 0, 0);
2976 rtx where = XEXP (elem, 0);
2977 rtx offset = XEXP (elem, 1);
2978 if (offset == const0_rtx
2979 && GET_MODE (where) == GET_MODE (valreg))
2980 valreg = where;
2984 /* Precompute all register parameters. It isn't safe to compute anything
2985 once we have started filling any specific hard regs. */
2986 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2988 if (CALL_EXPR_STATIC_CHAIN (exp))
2989 static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
2990 else
2991 static_chain_value = 0;
2993 #ifdef REG_PARM_STACK_SPACE
2994 /* Save the fixed argument area if it's part of the caller's frame and
2995 is clobbered by argument setup for this call. */
2996 if (ACCUMULATE_OUTGOING_ARGS && pass)
2997 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2998 &low_to_save, &high_to_save);
2999 #endif
3001 /* Now store (and compute if necessary) all non-register parms.
3002 These come before register parms, since they can require block-moves,
3003 which could clobber the registers used for register parms.
3004 Parms which have partial registers are not stored here,
3005 but we do preallocate space here if they want that. */
3007 for (i = 0; i < num_actuals; i++)
3009 if (args[i].reg == 0 || args[i].pass_on_stack)
3011 rtx before_arg = get_last_insn ();
3013 if (store_one_arg (&args[i], argblock, flags,
3014 adjusted_args_size.var != 0,
3015 reg_parm_stack_space)
3016 || (pass == 0
3017 && check_sibcall_argument_overlap (before_arg,
3018 &args[i], 1)))
3019 sibcall_failure = 1;
3022 if (args[i].stack)
3023 call_fusage
3024 = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
3025 gen_rtx_USE (VOIDmode, args[i].stack),
3026 call_fusage);
3029 /* If we have a parm that is passed in registers but not in memory
3030 and whose alignment does not permit a direct copy into registers,
3031 make a group of pseudos that correspond to each register that we
3032 will later fill. */
3033 if (STRICT_ALIGNMENT)
3034 store_unaligned_arguments_into_pseudos (args, num_actuals);
3036 /* Now store any partially-in-registers parm.
3037 This is the last place a block-move can happen. */
3038 if (reg_parm_seen)
3039 for (i = 0; i < num_actuals; i++)
3040 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3042 rtx before_arg = get_last_insn ();
3044 if (store_one_arg (&args[i], argblock, flags,
3045 adjusted_args_size.var != 0,
3046 reg_parm_stack_space)
3047 || (pass == 0
3048 && check_sibcall_argument_overlap (before_arg,
3049 &args[i], 1)))
3050 sibcall_failure = 1;
3053 /* If we pushed args in forward order, perform stack alignment
3054 after pushing the last arg. */
3055 if (!PUSH_ARGS_REVERSED && argblock == 0)
3056 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3057 - unadjusted_args_size));
3059 /* If register arguments require space on the stack and stack space
3060 was not preallocated, allocate stack space here for arguments
3061 passed in registers. */
3062 if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3063 && !ACCUMULATE_OUTGOING_ARGS
3064 && must_preallocate == 0 && reg_parm_stack_space > 0)
3065 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3067 /* Pass the function the address in which to return a
3068 structure value. */
3069 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3071 structure_value_addr
3072 = convert_memory_address (Pmode, structure_value_addr);
3073 emit_move_insn (struct_value,
3074 force_reg (Pmode,
3075 force_operand (structure_value_addr,
3076 NULL_RTX)));
3078 if (REG_P (struct_value))
3079 use_reg (&call_fusage, struct_value);
3082 after_args = get_last_insn ();
3083 funexp = prepare_call_address (fndecl, funexp, static_chain_value,
3084 &call_fusage, reg_parm_seen, pass == 0);
3086 load_register_parameters (args, num_actuals, &call_fusage, flags,
3087 pass == 0, &sibcall_failure);
3089 /* Save a pointer to the last insn before the call, so that we can
3090 later safely search backwards to find the CALL_INSN. */
3091 before_call = get_last_insn ();
3093 /* Set up next argument register. For sibling calls on machines
3094 with register windows this should be the incoming register. */
3095 if (pass == 0)
3096 next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
3097 VOIDmode,
3098 void_type_node,
3099 true);
3100 else
3101 next_arg_reg = targetm.calls.function_arg (args_so_far,
3102 VOIDmode, void_type_node,
3103 true);
3105 /* All arguments and registers used for the call must be set up by
3106 now! */
3108 /* Stack must be properly aligned now. */
3109 gcc_assert (!pass
3110 || !(stack_pointer_delta % preferred_unit_stack_boundary));
3112 /* Generate the actual call instruction. */
3113 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
3114 adjusted_args_size.constant, struct_value_size,
3115 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3116 flags, args_so_far);
3118 /* If the call setup or the call itself overlaps with anything
3119 of the argument setup we probably clobbered our call address.
3120 In that case we can't do sibcalls. */
3121 if (pass == 0
3122 && check_sibcall_argument_overlap (after_args, 0, 0))
3123 sibcall_failure = 1;
3125 /* If a non-BLKmode value is returned at the most significant end
3126 of a register, shift the register right by the appropriate amount
3127 and update VALREG accordingly. BLKmode values are handled by the
3128 group load/store machinery below. */
3129 if (!structure_value_addr
3130 && !pcc_struct_value
3131 && TYPE_MODE (rettype) != BLKmode
3132 && targetm.calls.return_in_msb (rettype))
3134 if (shift_return_value (TYPE_MODE (rettype), false, valreg))
3135 sibcall_failure = 1;
3136 valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
3139 if (pass && (flags & ECF_MALLOC))
3141 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3142 rtx last, insns;
3144 /* The return value from a malloc-like function is a pointer. */
3145 if (TREE_CODE (rettype) == POINTER_TYPE)
3146 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3148 emit_move_insn (temp, valreg);
3150 /* The return value from a malloc-like function cannot alias
3151 anything else. */
3152 last = get_last_insn ();
3153 add_reg_note (last, REG_NOALIAS, temp);
3155 /* Write out the sequence. */
3156 insns = get_insns ();
3157 end_sequence ();
3158 emit_insn (insns);
3159 valreg = temp;
3162 /* For calls to `setjmp', etc., inform
3163 function.c:setjmp_warnings that it should complain if
3164 nonvolatile values are live. For functions that cannot
3165 return, inform flow that control does not fall through. */
3167 if ((flags & ECF_NORETURN) || pass == 0)
3169 /* The barrier must be emitted
3170 immediately after the CALL_INSN. Some ports emit more
3171 than just a CALL_INSN above, so we must search for it here. */
3173 rtx last = get_last_insn ();
3174 while (!CALL_P (last))
3176 last = PREV_INSN (last);
3177 /* There was no CALL_INSN? */
3178 gcc_assert (last != before_call);
3181 emit_barrier_after (last);
3183 /* Stack adjustments after a noreturn call are dead code.
3184 However when NO_DEFER_POP is in effect, we must preserve
3185 stack_pointer_delta. */
3186 if (inhibit_defer_pop == 0)
3188 stack_pointer_delta = old_stack_allocated;
3189 pending_stack_adjust = 0;
3193 /* If value type not void, return an rtx for the value. */
3195 if (TYPE_MODE (rettype) == VOIDmode
3196 || ignore)
3197 target = const0_rtx;
3198 else if (structure_value_addr)
3200 if (target == 0 || !MEM_P (target))
3202 target
3203 = gen_rtx_MEM (TYPE_MODE (rettype),
3204 memory_address (TYPE_MODE (rettype),
3205 structure_value_addr));
3206 set_mem_attributes (target, rettype, 1);
3209 else if (pcc_struct_value)
3211 /* This is the special C++ case where we need to
3212 know what the true target was. We take care to
3213 never use this value more than once in one expression. */
3214 target = gen_rtx_MEM (TYPE_MODE (rettype),
3215 copy_to_reg (valreg));
3216 set_mem_attributes (target, rettype, 1);
3218 /* Handle calls that return values in multiple non-contiguous locations.
3219 The Irix 6 ABI has examples of this. */
3220 else if (GET_CODE (valreg) == PARALLEL)
3222 if (target == 0)
3224 /* This will only be assigned once, so it can be readonly. */
3225 tree nt = build_qualified_type (rettype,
3226 (TYPE_QUALS (rettype)
3227 | TYPE_QUAL_CONST));
3229 target = assign_temp (nt, 0, 1, 1);
3232 if (! rtx_equal_p (target, valreg))
3233 emit_group_store (target, valreg, rettype,
3234 int_size_in_bytes (rettype));
3236 /* We cannot support sibling calls for this case. */
3237 sibcall_failure = 1;
3239 else if (target
3240 && GET_MODE (target) == TYPE_MODE (rettype)
3241 && GET_MODE (target) == GET_MODE (valreg))
3243 bool may_overlap = false;
3245 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
3246 reg to a plain register. */
3247 if (!REG_P (target) || HARD_REGISTER_P (target))
3248 valreg = avoid_likely_spilled_reg (valreg);
3250 /* If TARGET is a MEM in the argument area, and we have
3251 saved part of the argument area, then we can't store
3252 directly into TARGET as it may get overwritten when we
3253 restore the argument save area below. Don't work too
3254 hard though and simply force TARGET to a register if it
3255 is a MEM; the optimizer is quite likely to sort it out. */
3256 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
3257 for (i = 0; i < num_actuals; i++)
3258 if (args[i].save_area)
3260 may_overlap = true;
3261 break;
3264 if (may_overlap)
3265 target = copy_to_reg (valreg);
3266 else
3268 /* TARGET and VALREG cannot be equal at this point
3269 because the latter would not have
3270 REG_FUNCTION_VALUE_P true, while the former would if
3271 it were referring to the same register.
3273 If they refer to the same register, this move will be
3274 a no-op, except when function inlining is being
3275 done. */
3276 emit_move_insn (target, valreg);
3278 /* If we are setting a MEM, this code must be executed.
3279 Since it is emitted after the call insn, sibcall
3280 optimization cannot be performed in that case. */
3281 if (MEM_P (target))
3282 sibcall_failure = 1;
3285 else if (TYPE_MODE (rettype) == BLKmode)
3287 rtx val = valreg;
3288 if (GET_MODE (val) != BLKmode)
3289 val = avoid_likely_spilled_reg (val);
3290 target = copy_blkmode_from_reg (target, val, rettype);
3292 /* We cannot support sibling calls for this case. */
3293 sibcall_failure = 1;
3295 else
3296 target = copy_to_reg (avoid_likely_spilled_reg (valreg));
3298 /* If we promoted this return value, make the proper SUBREG; a standalone
3299 sketch of the offset arithmetic follows this function. TARGET might be const0_rtx here, so be careful. */
3300 if (REG_P (target)
3301 && TYPE_MODE (rettype) != BLKmode
3302 && GET_MODE (target) != TYPE_MODE (rettype))
3304 tree type = rettype;
3305 int unsignedp = TYPE_UNSIGNED (type);
3306 int offset = 0;
3307 enum machine_mode pmode;
3309 /* Ensure we promote as expected, and get the new unsignedness. */
3310 pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
3311 funtype, 1);
3312 gcc_assert (GET_MODE (target) == pmode);
3314 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3315 && (GET_MODE_SIZE (GET_MODE (target))
3316 > GET_MODE_SIZE (TYPE_MODE (type))))
3318 offset = GET_MODE_SIZE (GET_MODE (target))
3319 - GET_MODE_SIZE (TYPE_MODE (type));
3320 if (! BYTES_BIG_ENDIAN)
3321 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3322 else if (! WORDS_BIG_ENDIAN)
3323 offset %= UNITS_PER_WORD;
3326 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3327 SUBREG_PROMOTED_VAR_P (target) = 1;
3328 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3331 /* If size of args is variable or this was a constructor call for a stack
3332 argument, restore saved stack-pointer value. */
3334 if (old_stack_level)
3336 rtx prev = get_last_insn ();
3338 emit_stack_restore (SAVE_BLOCK, old_stack_level);
3339 stack_pointer_delta = old_stack_pointer_delta;
3341 fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
3343 pending_stack_adjust = old_pending_adj;
3344 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3345 stack_arg_under_construction = old_stack_arg_under_construction;
3346 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3347 stack_usage_map = initial_stack_usage_map;
3348 sibcall_failure = 1;
3350 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3352 #ifdef REG_PARM_STACK_SPACE
3353 if (save_area)
3354 restore_fixed_argument_area (save_area, argblock,
3355 high_to_save, low_to_save);
3356 #endif
3358 /* If we saved any argument areas, restore them. */
3359 for (i = 0; i < num_actuals; i++)
3360 if (args[i].save_area)
3362 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3363 rtx stack_area
3364 = gen_rtx_MEM (save_mode,
3365 memory_address (save_mode,
3366 XEXP (args[i].stack_slot, 0)));
3368 if (save_mode != BLKmode)
3369 emit_move_insn (stack_area, args[i].save_area);
3370 else
3371 emit_block_move (stack_area, args[i].save_area,
3372 GEN_INT (args[i].locate.size.constant),
3373 BLOCK_OP_CALL_PARM);
3376 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3377 stack_usage_map = initial_stack_usage_map;
3380 /* If this was alloca, record the new stack level for nonlocal gotos.
3381 Check for the handler slots since we might not have a save area
3382 for non-local gotos. */
3384 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3385 update_nonlocal_goto_save_area ();
3387 /* Free up storage we no longer need. */
3388 for (i = 0; i < num_actuals; ++i)
3389 free (args[i].aligned_regs);
3391 insns = get_insns ();
3392 end_sequence ();
3394 if (pass == 0)
3396 tail_call_insns = insns;
3398 /* Restore the pending stack adjustment now that we have
3399 finished generating the sibling call sequence. */
3401 pending_stack_adjust = save_pending_stack_adjust;
3402 stack_pointer_delta = save_stack_pointer_delta;
3404 /* Prepare arg structure for next iteration. */
3405 for (i = 0; i < num_actuals; i++)
3406 {
3407 args[i].value = 0;
3408 args[i].aligned_regs = 0;
3409 args[i].stack = 0;
3410 }
3412 sbitmap_free (stored_args_map);
3413 internal_arg_pointer_exp_state.scan_start = NULL_RTX;
3414 VEC_free (rtx, heap, internal_arg_pointer_exp_state.cache);
3415 }
3416 else
3417 {
3418 normal_call_insns = insns;
3420 /* Verify that we've deallocated all the stack we used. */
3421 gcc_assert ((flags & ECF_NORETURN)
3422 || (old_stack_allocated
3423 == stack_pointer_delta - pending_stack_adjust));
3424 }
3426 /* If something prevents making this a sibling call,
3427 zero out the sequence. */
3428 if (sibcall_failure)
3429 tail_call_insns = NULL_RTX;
3430 else
3431 break;
3432 }
3434 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3435 arguments too, as argument area is now clobbered by the call. */
3436 if (tail_call_insns)
3437 {
3438 emit_insn (tail_call_insns);
3439 crtl->tail_call_emit = true;
3440 }
3441 else
3442 emit_insn (normal_call_insns);
3444 currently_expanding_call--;
3446 free (stack_usage_map_buf);
3448 return target;
3449 }
3451 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3452 this function's incoming arguments.
3454 At the start of RTL generation we know the only REG_EQUIV notes
3455 in the rtl chain are those for incoming arguments, so we can look
3456 for REG_EQUIV notes between the start of the function and the
3457 NOTE_INSN_FUNCTION_BEG.
3459 This is (slight) overkill. We could keep track of the highest
3460 argument we clobber and be more selective in removing notes, but it
3461 does not seem to be worth the effort. */
3463 void
3464 fixup_tail_calls (void)
3465 {
3466 rtx insn;
3468 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3469 {
3470 rtx note;
3472 /* There are never REG_EQUIV notes for the incoming arguments
3473 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3474 if (NOTE_P (insn)
3475 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
3476 break;
3478 note = find_reg_note (insn, REG_EQUIV, 0);
3479 if (note)
3480 remove_note (insn, note);
3481 note = find_reg_note (insn, REG_EQUIV, 0);
3482 gcc_assert (!note);
3483 }
3484 }
3486 /* Traverse a list of TYPES and expand all complex types into their
3487 components. */
3488 static tree
3489 split_complex_types (tree types)
3490 {
3491 tree p;
3493 /* Before allocating memory, check for the common case of no complex. */
3494 for (p = types; p; p = TREE_CHAIN (p))
3495 {
3496 tree type = TREE_VALUE (p);
3497 if (TREE_CODE (type) == COMPLEX_TYPE
3498 && targetm.calls.split_complex_arg (type))
3499 goto found;
3500 }
3501 return types;
3503 found:
3504 types = copy_list (types);
3506 for (p = types; p; p = TREE_CHAIN (p))
3507 {
3508 tree complex_type = TREE_VALUE (p);
3510 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3511 && targetm.calls.split_complex_arg (complex_type))
3512 {
3513 tree next, imag;
3515 /* Rewrite complex type with component type. */
3516 TREE_VALUE (p) = TREE_TYPE (complex_type);
3517 next = TREE_CHAIN (p);
3519 /* Add another component type for the imaginary part. */
3520 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3521 TREE_CHAIN (p) = imag;
3522 TREE_CHAIN (imag) = next;
3524 /* Skip the newly created node. */
3525 p = TREE_CHAIN (p);
3526 }
3527 }
3529 return types;
3530 }
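/* Editorial illustration, not part of the original source: given an
   argument-type list (complex double, int) on a target whose
   split_complex_arg hook accepts complex double, the loop above
   rewrites the list in place to (double, double, int): the complex
   node is narrowed to its component type and a fresh TREE_LIST node
   for the imaginary part is spliced in after it.  */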
3532 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3533 The RETVAL parameter specifies whether the return value needs to be saved;
3534 the other parameters are documented in the emit_library_call function below. */
3536 static rtx
3537 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3538 enum libcall_type fn_type,
3539 enum machine_mode outmode, int nargs, va_list p)
3540 {
3541 /* Total size in bytes of all the stack-parms scanned so far. */
3542 struct args_size args_size;
3543 /* Size of arguments before any adjustments (such as rounding). */
3544 struct args_size original_args_size;
3545 int argnum;
3546 rtx fun;
3547 /* TODO: choose the correct decl type of orgfun. Sadly this information
3548 isn't present here, so we default to the native calling ABI here. */
3549 tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to the host calling ABI? */
3550 tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to the host calling ABI? */
3551 int inc;
3552 int count;
3553 rtx argblock = 0;
3554 CUMULATIVE_ARGS args_so_far_v;
3555 cumulative_args_t args_so_far;
3556 struct arg
3557 {
3558 rtx value;
3559 enum machine_mode mode;
3560 rtx reg;
3561 int partial;
3562 struct locate_and_pad_arg_data locate;
3563 rtx save_area;
3564 };
3565 struct arg *argvec;
3566 int old_inhibit_defer_pop = inhibit_defer_pop;
3567 rtx call_fusage = 0;
3568 rtx mem_value = 0;
3569 rtx valreg;
3570 int pcc_struct_value = 0;
3571 int struct_value_size = 0;
3572 int flags;
3573 int reg_parm_stack_space = 0;
3574 int needed;
3575 rtx before_call;
3576 tree tfom; /* type_for_mode (outmode, 0) */
3578 #ifdef REG_PARM_STACK_SPACE
3579 /* Define the boundary of the register parm stack space that needs to be
3580 saved, if any. */
3581 int low_to_save = 0, high_to_save = 0;
3582 rtx save_area = 0; /* Place that it is saved. */
3583 #endif
3585 /* Size of the stack reserved for parameter registers. */
3586 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3587 char *initial_stack_usage_map = stack_usage_map;
3588 char *stack_usage_map_buf = NULL;
3590 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3592 #ifdef REG_PARM_STACK_SPACE
3593 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3594 #endif
3596 /* By default, library functions cannot throw. */
3597 flags = ECF_NOTHROW;
3599 switch (fn_type)
3600 {
3601 case LCT_NORMAL:
3602 break;
3603 case LCT_CONST:
3604 flags |= ECF_CONST;
3605 break;
3606 case LCT_PURE:
3607 flags |= ECF_PURE;
3608 break;
3609 case LCT_NORETURN:
3610 flags |= ECF_NORETURN;
3611 break;
3612 case LCT_THROW:
3613 flags = ECF_NORETURN;
3614 break;
3615 case LCT_RETURNS_TWICE:
3616 flags = ECF_RETURNS_TWICE;
3617 break;
3618 }
3619 fun = orgfun;
3621 /* Ensure current function's preferred stack boundary is at least
3622 what we need. */
3623 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3624 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3626 /* If this kind of value comes back in memory,
3627 decide where in memory it should come back. */
3628 if (outmode != VOIDmode)
3629 {
3630 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3631 if (aggregate_value_p (tfom, 0))
3632 {
3633 #ifdef PCC_STATIC_STRUCT_RETURN
3634 rtx pointer_reg
3635 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3636 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3637 pcc_struct_value = 1;
3638 if (value == 0)
3639 value = gen_reg_rtx (outmode);
3640 #else /* not PCC_STATIC_STRUCT_RETURN */
3641 struct_value_size = GET_MODE_SIZE (outmode);
3642 if (value != 0 && MEM_P (value))
3643 mem_value = value;
3644 else
3645 mem_value = assign_temp (tfom, 0, 1, 1);
3646 #endif
3647 /* This call returns a big structure. */
3648 flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3649 }
3650 }
3651 else
3652 tfom = void_type_node;
3654 /* ??? Unfinished: must pass the memory address as an argument. */
3656 /* Copy all the libcall-arguments out of the varargs data
3657 and into a vector ARGVEC.
3659 Compute how to pass each argument. We only support a very small subset
3660 of the full argument passing conventions to limit complexity here since
3661 library functions shouldn't have many args. */
3663 argvec = XALLOCAVEC (struct arg, nargs + 1);
3664 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3666 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3667 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
3668 #else
3669 INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
3670 #endif
3671 args_so_far = pack_cumulative_args (&args_so_far_v);
3673 args_size.constant = 0;
3674 args_size.var = 0;
3676 count = 0;
3678 push_temp_slots ();
3680 /* If there's a structure value address to be passed,
3681 either pass it in the special place, or pass it as an extra argument. */
3682 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3683 {
3684 rtx addr = XEXP (mem_value, 0);
3686 nargs++;
3688 /* Make sure it is a reasonable operand for a move or push insn. */
3689 if (!REG_P (addr) && !MEM_P (addr)
3690 && !(CONSTANT_P (addr)
3691 && targetm.legitimate_constant_p (Pmode, addr)))
3692 addr = force_operand (addr, NULL_RTX);
3694 argvec[count].value = addr;
3695 argvec[count].mode = Pmode;
3696 argvec[count].partial = 0;
3698 argvec[count].reg = targetm.calls.function_arg (args_so_far,
3699 Pmode, NULL_TREE, true);
3700 gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
3701 NULL_TREE, 1) == 0);
3703 locate_and_pad_parm (Pmode, NULL_TREE,
3704 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3705 1,
3706 #else
3707 argvec[count].reg != 0,
3708 #endif
3709 0, NULL_TREE, &args_size, &argvec[count].locate);
3711 if (argvec[count].reg == 0 || argvec[count].partial != 0
3712 || reg_parm_stack_space > 0)
3713 args_size.constant += argvec[count].locate.size.constant;
3715 targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
3717 count++;
3718 }
3720 for (; count < nargs; count++)
3721 {
3722 rtx val = va_arg (p, rtx);
3723 enum machine_mode mode = (enum machine_mode) va_arg (p, int);
3724 int unsigned_p = 0;
3726 /* We cannot convert the arg value to the mode the library wants here;
3727 must do it earlier where we know the signedness of the arg. */
3728 gcc_assert (mode != BLKmode
3729 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3731 /* Make sure it is a reasonable operand for a move or push insn. */
3732 if (!REG_P (val) && !MEM_P (val)
3733 && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
3734 val = force_operand (val, NULL_RTX);
3736 if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
3737 {
3738 rtx slot;
3739 int must_copy
3740 = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
3742 /* If this was a CONST function, it is now PURE since it now
3743 reads memory. */
3744 if (flags & ECF_CONST)
3745 {
3746 flags &= ~ECF_CONST;
3747 flags |= ECF_PURE;
3748 }
3750 if (MEM_P (val) && !must_copy)
3751 {
3752 tree val_expr = MEM_EXPR (val);
3753 if (val_expr)
3754 mark_addressable (val_expr);
3755 slot = val;
3756 }
3757 else
3758 {
3759 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3760 0, 1, 1);
3761 emit_move_insn (slot, val);
3762 }
3764 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3765 gen_rtx_USE (VOIDmode, slot),
3766 call_fusage);
3767 if (must_copy)
3768 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3769 gen_rtx_CLOBBER (VOIDmode,
3770 slot),
3771 call_fusage);
3773 mode = Pmode;
3774 val = force_operand (XEXP (slot, 0), NULL_RTX);
3775 }
3777 mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
3778 argvec[count].mode = mode;
3779 argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
3780 argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
3781 NULL_TREE, true);
3783 argvec[count].partial
3784 = targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
3786 if (argvec[count].reg == 0
3787 || argvec[count].partial != 0
3788 || reg_parm_stack_space > 0)
3789 {
3790 locate_and_pad_parm (mode, NULL_TREE,
3791 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3792 1,
3793 #else
3794 argvec[count].reg != 0,
3795 #endif
3796 argvec[count].partial,
3797 NULL_TREE, &args_size, &argvec[count].locate);
3798 args_size.constant += argvec[count].locate.size.constant;
3799 gcc_assert (!argvec[count].locate.size.var);
3800 }
3801 #ifdef BLOCK_REG_PADDING
3802 else
3803 /* The argument is passed entirely in registers. See at which
3804 end it should be padded. */
3805 argvec[count].locate.where_pad =
3806 BLOCK_REG_PADDING (mode, NULL_TREE,
3807 GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
3808 #endif
3810 targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
3811 }
3813 /* If this machine requires an external definition for library
3814 functions, write one out. */
3815 assemble_external_libcall (fun);
3817 original_args_size = args_size;
3818 args_size.constant = (((args_size.constant
3819 + stack_pointer_delta
3820 + STACK_BYTES - 1)
3821 / STACK_BYTES
3822 * STACK_BYTES)
3823 - stack_pointer_delta);
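/* Editorial note, not part of the original source: the rounding above
   keeps the stack pointer aligned after the arguments are pushed.
   For example, with STACK_BYTES == 16, stack_pointer_delta == 8 and
   an unrounded args_size.constant of 20, (20 + 8 + 15) / 16 * 16 - 8
   == 24, and pushing 24 bytes leaves the total delta at 32, a
   multiple of STACK_BYTES.  */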
3825 args_size.constant = MAX (args_size.constant,
3826 reg_parm_stack_space);
3828 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3829 args_size.constant -= reg_parm_stack_space;
3831 if (args_size.constant > crtl->outgoing_args_size)
3832 crtl->outgoing_args_size = args_size.constant;
3834 if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
3835 {
3836 int pushed = args_size.constant + pending_stack_adjust;
3837 if (pushed > current_function_pushed_stack_size)
3838 current_function_pushed_stack_size = pushed;
3839 }
3841 if (ACCUMULATE_OUTGOING_ARGS)
3842 {
3843 /* Since the stack pointer will never be pushed, it is possible for
3844 the evaluation of a parm to clobber something we have already
3845 written to the stack. Since most function calls on RISC machines
3846 do not use the stack, this is uncommon, but must work correctly.
3848 Therefore, we save any area of the stack that was already written
3849 and that we are using. Here we set up to do this by making a new
3850 stack usage map from the old one.
3852 Another approach might be to try to reorder the argument
3853 evaluations to avoid this conflicting stack usage. */
3855 needed = args_size.constant;
3857 /* Since we will be writing into the entire argument area, the
3858 map must be allocated for its entire size, not just the part that
3859 is the responsibility of the caller. */
3860 if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3861 needed += reg_parm_stack_space;
3863 #ifdef ARGS_GROW_DOWNWARD
3864 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3865 needed + 1);
3866 #else
3867 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3868 needed);
3869 #endif
3870 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3871 stack_usage_map = stack_usage_map_buf;
3873 if (initial_highest_arg_in_use)
3874 memcpy (stack_usage_map, initial_stack_usage_map,
3875 initial_highest_arg_in_use);
3877 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3878 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3879 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3880 needed = 0;
3882 /* We must be careful to use virtual regs before they're instantiated,
3883 and real regs afterwards. Loop optimization, for example, can create
3884 new libcalls after we've instantiated the virtual regs, and if we
3885 use virtuals anyway, they won't match the rtl patterns. */
3887 if (virtuals_instantiated)
3888 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3889 else
3890 argblock = virtual_outgoing_args_rtx;
3891 }
3892 else
3893 {
3894 if (!PUSH_ARGS)
3895 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3896 }
3898 /* If we push args individually in reverse order, perform stack alignment
3899 before the first push (the last arg). */
3900 if (argblock == 0 && PUSH_ARGS_REVERSED)
3901 anti_adjust_stack (GEN_INT (args_size.constant
3902 - original_args_size.constant));
3904 if (PUSH_ARGS_REVERSED)
3905 {
3906 inc = -1;
3907 argnum = nargs - 1;
3908 }
3909 else
3910 {
3911 inc = 1;
3912 argnum = 0;
3913 }
3915 #ifdef REG_PARM_STACK_SPACE
3916 if (ACCUMULATE_OUTGOING_ARGS)
3917 {
3918 /* The argument list is the property of the called routine and it
3919 may clobber it. If the fixed area has been used for previous
3920 parameters, we must save and restore it. */
3921 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3922 &low_to_save, &high_to_save);
3923 }
3924 #endif
3926 /* Push the args that need to be pushed. */
3928 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3929 are to be pushed. */
3930 for (count = 0; count < nargs; count++, argnum += inc)
3931 {
3932 enum machine_mode mode = argvec[argnum].mode;
3933 rtx val = argvec[argnum].value;
3934 rtx reg = argvec[argnum].reg;
3935 int partial = argvec[argnum].partial;
3936 unsigned int parm_align = argvec[argnum].locate.boundary;
3937 int lower_bound = 0, upper_bound = 0, i;
3939 if (! (reg != 0 && partial == 0))
3940 {
3941 rtx use;
3943 if (ACCUMULATE_OUTGOING_ARGS)
3944 {
3945 /* If this is being stored into a pre-allocated, fixed-size
3946 stack area, save any previous data at that location. */
3948 #ifdef ARGS_GROW_DOWNWARD
3949 /* stack_slot is negative, but we want to index stack_usage_map
3950 with positive values. */
3951 upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
3952 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3953 #else
3954 lower_bound = argvec[argnum].locate.slot_offset.constant;
3955 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3956 #endif
3958 i = lower_bound;
3959 /* Don't worry about things in the fixed argument area;
3960 it has already been saved. */
3961 if (i < reg_parm_stack_space)
3962 i = reg_parm_stack_space;
3963 while (i < upper_bound && stack_usage_map[i] == 0)
3964 i++;
3966 if (i < upper_bound)
3967 {
3968 /* We need to make a save area. */
3969 unsigned int size
3970 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3971 enum machine_mode save_mode
3972 = mode_for_size (size, MODE_INT, 1);
3973 rtx adr
3974 = plus_constant (argblock,
3975 argvec[argnum].locate.offset.constant);
3976 rtx stack_area
3977 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3979 if (save_mode == BLKmode)
3980 {
3981 argvec[argnum].save_area
3982 = assign_stack_temp (BLKmode,
3983 argvec[argnum].locate.size.constant,
3984 0);
3986 emit_block_move (validize_mem (argvec[argnum].save_area),
3987 stack_area,
3988 GEN_INT (argvec[argnum].locate.size.constant),
3989 BLOCK_OP_CALL_PARM);
3990 }
3991 else
3992 {
3993 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3995 emit_move_insn (argvec[argnum].save_area, stack_area);
3996 }
3997 }
3998 }
4000 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
4001 partial, reg, 0, argblock,
4002 GEN_INT (argvec[argnum].locate.offset.constant),
4003 reg_parm_stack_space,
4004 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4006 /* Now mark the segment we just used. */
4007 if (ACCUMULATE_OUTGOING_ARGS)
4008 for (i = lower_bound; i < upper_bound; i++)
4009 stack_usage_map[i] = 1;
4011 NO_DEFER_POP;
4013 /* Indicate argument access so that alias.c knows that these
4014 values are live. */
4015 if (argblock)
4016 use = plus_constant (argblock,
4017 argvec[argnum].locate.offset.constant);
4018 else
4019 /* When arguments are pushed, trying to tell alias.c where
4020 exactly this argument is won't work, because the
4021 auto-increment causes confusion. So we merely indicate
4022 that we access something with a known mode somewhere on
4023 the stack. */
4024 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
4025 gen_rtx_SCRATCH (Pmode));
4026 use = gen_rtx_MEM (argvec[argnum].mode, use);
4027 use = gen_rtx_USE (VOIDmode, use);
4028 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
4029 }
4030 }
4032 /* If we pushed args in forward order, perform stack alignment
4033 after pushing the last arg. */
4034 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4035 anti_adjust_stack (GEN_INT (args_size.constant
4036 - original_args_size.constant));
4038 if (PUSH_ARGS_REVERSED)
4039 argnum = nargs - 1;
4040 else
4041 argnum = 0;
4043 fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
4045 /* Now load any reg parms into their regs. */
4047 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4048 are to be pushed. */
4049 for (count = 0; count < nargs; count++, argnum += inc)
4050 {
4051 enum machine_mode mode = argvec[argnum].mode;
4052 rtx val = argvec[argnum].value;
4053 rtx reg = argvec[argnum].reg;
4054 int partial = argvec[argnum].partial;
4055 #ifdef BLOCK_REG_PADDING
4056 int size = 0;
4057 #endif
4059 /* Handle calls that pass values in multiple non-contiguous
4060 locations. The PA64 has examples of this for library calls. */
4061 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4062 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
4063 else if (reg != 0 && partial == 0)
4064 {
4065 emit_move_insn (reg, val);
4066 #ifdef BLOCK_REG_PADDING
4067 size = GET_MODE_SIZE (argvec[argnum].mode);
4069 /* Copied from load_register_parameters. */
4071 /* Handle the case where we have a value that needs shifting
4072 up to the MSB, e.g. a QImode value being padded upward on
4073 a BYTES_BIG_ENDIAN machine. */
4074 if (size < UNITS_PER_WORD
4075 && (argvec[argnum].locate.where_pad
4076 == (BYTES_BIG_ENDIAN ? upward : downward)))
4077 {
4078 rtx x;
4079 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
4081 /* Assigning REG here rather than a temp makes CALL_FUSAGE
4082 report the whole reg as used. Strictly speaking, the
4083 call only uses SIZE bytes at the msb end, but it doesn't
4084 seem worth generating rtl to say that. */
4085 reg = gen_rtx_REG (word_mode, REGNO (reg));
4086 x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
4087 if (x != reg)
4088 emit_move_insn (reg, x);
4089 }
4090 #endif
4091 }
4093 NO_DEFER_POP;
4094 }
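/* Editorial note, not part of the original source: in the
   BLOCK_REG_PADDING path above, a 1-byte argument padded upward on a
   big-endian target with 4-byte words gets
   shift = (4 - 1) * BITS_PER_UNIT == 24, which moves the value from
   the least-significant byte of the word register to the end the
   callee expects to read it from.  */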
4096 /* Any regs containing parms remain in use through the call. */
4097 for (count = 0; count < nargs; count++)
4098 {
4099 rtx reg = argvec[count].reg;
4100 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4101 use_group_regs (&call_fusage, reg);
4102 else if (reg != 0)
4103 {
4104 int partial = argvec[count].partial;
4105 if (partial)
4106 {
4107 int nregs;
4108 gcc_assert (partial % UNITS_PER_WORD == 0);
4109 nregs = partial / UNITS_PER_WORD;
4110 use_regs (&call_fusage, REGNO (reg), nregs);
4111 }
4112 else
4113 use_reg (&call_fusage, reg);
4114 }
4115 }
4117 /* Pass the function the address in which to return a structure value. */
4118 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
4119 {
4120 emit_move_insn (struct_value,
4121 force_reg (Pmode,
4122 force_operand (XEXP (mem_value, 0),
4123 NULL_RTX)));
4124 if (REG_P (struct_value))
4125 use_reg (&call_fusage, struct_value);
4126 }
4128 /* Don't allow popping to be deferred, since then
4129 cse'ing of library calls could delete a call and leave the pop. */
4130 NO_DEFER_POP;
4131 valreg = (mem_value == 0 && outmode != VOIDmode
4132 ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
4134 /* Stack must be properly aligned now. */
4135 gcc_assert (!(stack_pointer_delta
4136 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
4138 before_call = get_last_insn ();
4140 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4141 will set inhibit_defer_pop to that value. */
4142 /* The return type is needed to decide how many bytes the function pops.
4143 Signedness plays no role in that, so for simplicity, we pretend it's
4144 always signed. We also assume that the list of arguments passed has
4145 no impact, so we pretend it is unknown. */
4147 emit_call_1 (fun, NULL,
4148 get_identifier (XSTR (orgfun, 0)),
4149 build_function_type (tfom, NULL_TREE),
4150 original_args_size.constant, args_size.constant,
4151 struct_value_size,
4152 targetm.calls.function_arg (args_so_far,
4153 VOIDmode, void_type_node, true),
4154 valreg,
4155 old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
4157 /* Right-shift returned value if necessary. */
4158 if (!pcc_struct_value
4159 && TYPE_MODE (tfom) != BLKmode
4160 && targetm.calls.return_in_msb (tfom))
4161 {
4162 shift_return_value (TYPE_MODE (tfom), false, valreg);
4163 valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
4164 }
4166 /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
4167 that it should complain if nonvolatile values are live. For
4168 functions that cannot return, inform flow that control does not
4169 fall through. */
4171 if (flags & ECF_NORETURN)
4172 {
4173 /* The barrier note must be emitted
4174 immediately after the CALL_INSN. Some ports emit more than
4175 just a CALL_INSN above, so we must search for it here. */
4177 rtx last = get_last_insn ();
4178 while (!CALL_P (last))
4179 {
4180 last = PREV_INSN (last);
4181 /* There was no CALL_INSN? */
4182 gcc_assert (last != before_call);
4183 }
4185 emit_barrier_after (last);
4186 }
4188 /* Now restore inhibit_defer_pop to its actual original value. */
4189 OK_DEFER_POP;
4191 pop_temp_slots ();
4193 /* Copy the value to the right place. */
4194 if (outmode != VOIDmode && retval)
4195 {
4196 if (mem_value)
4197 {
4198 if (value == 0)
4199 value = mem_value;
4200 if (value != mem_value)
4201 emit_move_insn (value, mem_value);
4202 }
4203 else if (GET_CODE (valreg) == PARALLEL)
4204 {
4205 if (value == 0)
4206 value = gen_reg_rtx (outmode);
4207 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
4208 }
4209 else
4210 {
4211 /* Convert to the proper mode if a promotion has been active. */
4212 if (GET_MODE (valreg) != outmode)
4213 {
4214 int unsignedp = TYPE_UNSIGNED (tfom);
4216 gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
4217 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
4218 == GET_MODE (valreg));
4219 valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
4220 }
4222 if (value != 0)
4223 emit_move_insn (value, valreg);
4224 else
4225 value = valreg;
4226 }
4227 }
4229 if (ACCUMULATE_OUTGOING_ARGS)
4230 {
4231 #ifdef REG_PARM_STACK_SPACE
4232 if (save_area)
4233 restore_fixed_argument_area (save_area, argblock,
4234 high_to_save, low_to_save);
4235 #endif
4237 /* If we saved any argument areas, restore them. */
4238 for (count = 0; count < nargs; count++)
4239 if (argvec[count].save_area)
4240 {
4241 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4242 rtx adr = plus_constant (argblock,
4243 argvec[count].locate.offset.constant);
4244 rtx stack_area = gen_rtx_MEM (save_mode,
4245 memory_address (save_mode, adr));
4247 if (save_mode == BLKmode)
4248 emit_block_move (stack_area,
4249 validize_mem (argvec[count].save_area),
4250 GEN_INT (argvec[count].locate.size.constant),
4251 BLOCK_OP_CALL_PARM);
4252 else
4253 emit_move_insn (stack_area, argvec[count].save_area);
4254 }
4256 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4257 stack_usage_map = initial_stack_usage_map;
4258 }
4260 free (stack_usage_map_buf);
4262 return value;
4263 }
4266 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4268 for a value of mode OUTMODE,
4269 with NARGS different arguments, passed as alternating rtx values
4270 and machine_modes to convert them to.
4272 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4273 `const' calls, LCT_PURE for `pure' calls, or other LCT_ value for
4274 other types of library calls. */
4276 void
4277 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4278 enum machine_mode outmode, int nargs, ...)
4279 {
4280 va_list p;
4282 va_start (p, nargs);
4283 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4284 va_end (p);
4285 }
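/* Editorial sketch, not part of the original source: a representative
   use of emit_library_call, modeled on how memory is cleared through
   the memset libfunc elsewhere in the compiler.  OBJECT and SIZE are
   assumed to be rtx values already computed by the caller:

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
			XEXP (object, 0), Pmode,
			const0_rtx, TYPE_MODE (integer_type_node),
			size, TYPE_MODE (sizetype));  */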
4287 /* Like emit_library_call except that an extra argument, VALUE,
4288 comes second and says where to store the result.
4289 (If VALUE is zero, this function chooses a convenient way
4290 to return the value.
4292 This function returns an rtx for where the value is to be found.
4293 If VALUE is nonzero, VALUE is returned. */
4295 rtx
4296 emit_library_call_value (rtx orgfun, rtx value,
4297 enum libcall_type fn_type,
4298 enum machine_mode outmode, int nargs, ...)
4299 {
4300 rtx result;
4301 va_list p;
4303 va_start (p, nargs);
4304 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4305 nargs, p);
4306 va_end (p);
4308 return result;
4309 }
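/* Editorial sketch, not part of the original source: optabs-style use
   of emit_library_call_value for a two-operand libcall, where MODE is
   the operand and result mode, LIBFUNC is the SYMBOL_REF for the
   routine, and OP0/OP1 are rtx operands supplied by the caller:

     target = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
				       mode, 2, op0, mode, op1, mode);  */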
4311 /* Store a single argument for a function call
4312 into the register or memory area where it must be passed.
4313 *ARG describes the argument value and where to pass it.
4315 ARGBLOCK is the address of the stack-block for all the arguments,
4316 or 0 on a machine where arguments are pushed individually.
4318 MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
4319 so we must be careful about how the stack is used.
4321 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4322 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4323 that we need not worry about saving and restoring the stack.
4325 FNDECL is the declaration of the function we are calling.
4327 Return nonzero if this arg should cause sibcall failure,
4328 zero otherwise. */
4330 static int
4331 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4332 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4333 {
4334 tree pval = arg->tree_value;
4335 rtx reg = 0;
4336 int partial = 0;
4337 int used = 0;
4338 int i, lower_bound = 0, upper_bound = 0;
4339 int sibcall_failure = 0;
4341 if (TREE_CODE (pval) == ERROR_MARK)
4342 return 1;
4344 /* Push a new temporary level for any temporaries we make for
4345 this argument. */
4346 push_temp_slots ();
4348 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4349 {
4350 /* If this is being stored into a pre-allocated, fixed-size stack area,
4351 save any previous data at that location. */
4352 if (argblock && ! variable_size && arg->stack)
4353 {
4354 #ifdef ARGS_GROW_DOWNWARD
4355 /* stack_slot is negative, but we want to index stack_usage_map
4356 with positive values. */
4357 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4358 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4359 else
4360 upper_bound = 0;
4362 lower_bound = upper_bound - arg->locate.size.constant;
4363 #else
4364 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4365 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4366 else
4367 lower_bound = 0;
4369 upper_bound = lower_bound + arg->locate.size.constant;
4370 #endif
4372 i = lower_bound;
4373 /* Don't worry about things in the fixed argument area;
4374 it has already been saved. */
4375 if (i < reg_parm_stack_space)
4376 i = reg_parm_stack_space;
4377 while (i < upper_bound && stack_usage_map[i] == 0)
4378 i++;
4380 if (i < upper_bound)
4381 {
4382 /* We need to make a save area. */
4383 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4384 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4385 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4386 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4388 if (save_mode == BLKmode)
4389 {
4390 tree ot = TREE_TYPE (arg->tree_value);
4391 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4392 | TYPE_QUAL_CONST));
4394 arg->save_area = assign_temp (nt, 0, 1, 1);
4395 preserve_temp_slots (arg->save_area);
4396 emit_block_move (validize_mem (arg->save_area), stack_area,
4397 GEN_INT (arg->locate.size.constant),
4398 BLOCK_OP_CALL_PARM);
4399 }
4400 else
4401 {
4402 arg->save_area = gen_reg_rtx (save_mode);
4403 emit_move_insn (arg->save_area, stack_area);
4404 }
4405 }
4406 }
4407 }
4409 /* If this isn't going to be placed on both the stack and in registers,
4410 set up the register and number of words. */
4411 if (! arg->pass_on_stack)
4412 {
4413 if (flags & ECF_SIBCALL)
4414 reg = arg->tail_call_reg;
4415 else
4416 reg = arg->reg;
4417 partial = arg->partial;
4418 }
4420 /* Being passed entirely in a register. We shouldn't be called in
4421 this case. */
4422 gcc_assert (reg == 0 || partial != 0);
4424 /* If this arg needs special alignment, don't load the registers
4425 here. */
4426 if (arg->n_aligned_regs != 0)
4427 reg = 0;
4429 /* If this is being passed partially in a register, we can't evaluate
4430 it directly into its stack slot. Otherwise, we can. */
4431 if (arg->value == 0)
4432 {
4433 /* stack_arg_under_construction is nonzero if a function argument is
4434 being evaluated directly into the outgoing argument list and
4435 expand_call must take special action to preserve the argument list
4436 if it is called recursively.
4438 For scalar function arguments stack_usage_map is sufficient to
4439 determine which stack slots must be saved and restored. Scalar
4440 arguments in general have pass_on_stack == 0.
4442 If this argument is initialized by a function which takes the
4443 address of the argument (a C++ constructor or a C function
4444 returning a BLKmode structure), then stack_usage_map is
4445 insufficient and expand_call must push the stack around the
4446 function call. Such arguments have pass_on_stack == 1.
4448 Note that it is always safe to set stack_arg_under_construction,
4449 but this generates suboptimal code if set when not needed. */
4451 if (arg->pass_on_stack)
4452 stack_arg_under_construction++;
4454 arg->value = expand_expr (pval,
4455 (partial
4456 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4457 ? NULL_RTX : arg->stack,
4458 VOIDmode, EXPAND_STACK_PARM);
4460 /* If we are promoting the object (or if for any other reason the
4461 mode doesn't agree), convert the mode. */
4463 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4464 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4465 arg->value, arg->unsignedp);
4467 if (arg->pass_on_stack)
4468 stack_arg_under_construction--;
4469 }
4471 /* Check for overlap with already clobbered argument area. */
4472 if ((flags & ECF_SIBCALL)
4473 && MEM_P (arg->value)
4474 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4475 arg->locate.size.constant))
4476 sibcall_failure = 1;
4478 /* Don't allow anything left on stack from computation
4479 of argument to alloca. */
4480 if (flags & ECF_MAY_BE_ALLOCA)
4481 do_pending_stack_adjust ();
4483 if (arg->value == arg->stack)
4484 /* If the value is already in the stack slot, we are done. */
4485 ;
4486 else if (arg->mode != BLKmode)
4487 {
4488 int size;
4489 unsigned int parm_align;
4491 /* Argument is a scalar, not entirely passed in registers.
4492 (If part is passed in registers, arg->partial says how much
4493 and emit_push_insn will take care of putting it there.)
4495 Push it, and if its size is less than the
4496 amount of space allocated to it,
4497 also bump stack pointer by the additional space.
4498 Note that in C the default argument promotions
4499 will prevent such mismatches. */
4501 size = GET_MODE_SIZE (arg->mode);
4502 /* Compute how much space the push instruction will push.
4503 On many machines, pushing a byte will advance the stack
4504 pointer by a halfword. */
4505 #ifdef PUSH_ROUNDING
4506 size = PUSH_ROUNDING (size);
4507 #endif
4508 used = size;
4510 /* Compute how much space the argument should get:
4511 round up to a multiple of the alignment for arguments. */
4512 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4513 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4514 / (PARM_BOUNDARY / BITS_PER_UNIT))
4515 * (PARM_BOUNDARY / BITS_PER_UNIT));
4517 /* Compute the alignment of the pushed argument. */
4518 parm_align = arg->locate.boundary;
4519 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4520 {
4521 int pad = used - size;
4522 if (pad)
4523 {
4524 unsigned int pad_align = (pad & -pad) * BITS_PER_UNIT;
4525 parm_align = MIN (parm_align, pad_align);
4526 }
4527 }
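/* Editorial note, not part of the original source: PAD & -PAD isolates
   the least-significant set bit of PAD, so the computation above
   lowers PARM_ALIGN to the largest alignment the padding preserves;
   e.g. PAD == 12 gives 12 & -12 == 4 bytes, i.e. a 32-bit
   PAD_ALIGN.  */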
4529 /* This isn't already where we want it on the stack, so put it there.
4530 This can either be done with push or copy insns. */
4531 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4532 parm_align, partial, reg, used - size, argblock,
4533 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4534 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4536 /* Unless this is a partially-in-register argument, the argument is now
4537 in the stack. */
4538 if (partial == 0)
4539 arg->value = arg->stack;
4540 }
4541 else
4542 {
4543 /* BLKmode, at least partly to be pushed. */
4545 unsigned int parm_align;
4546 int excess;
4547 rtx size_rtx;
4549 /* Pushing a nonscalar.
4550 If part is passed in registers, PARTIAL says how much
4551 and emit_push_insn will take care of putting it there. */
4553 /* Round its size up to a multiple
4554 of the allocation unit for arguments. */
4556 if (arg->locate.size.var != 0)
4557 {
4558 excess = 0;
4559 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4560 }
4561 else
4562 {
4563 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4564 for BLKmode is careful to avoid it. */
4565 excess = (arg->locate.size.constant
4566 - int_size_in_bytes (TREE_TYPE (pval))
4567 + partial);
4568 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4569 NULL_RTX, TYPE_MODE (sizetype),
4570 EXPAND_NORMAL);
4571 }
4573 parm_align = arg->locate.boundary;
4575 /* When an argument is padded down, the block is aligned to
4576 PARM_BOUNDARY, but the actual argument isn't. */
4577 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4578 {
4579 if (arg->locate.size.var)
4580 parm_align = BITS_PER_UNIT;
4581 else if (excess)
4582 {
4583 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4584 parm_align = MIN (parm_align, excess_align);
4585 }
4586 }
4588 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4589 {
4590 /* emit_push_insn might not work properly if arg->value and
4591 argblock + arg->locate.offset areas overlap. */
4592 rtx x = arg->value;
4593 int i = 0;
4595 if (XEXP (x, 0) == crtl->args.internal_arg_pointer
4596 || (GET_CODE (XEXP (x, 0)) == PLUS
4597 && XEXP (XEXP (x, 0), 0) ==
4598 crtl->args.internal_arg_pointer
4599 && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
4600 {
4601 if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
4602 i = INTVAL (XEXP (XEXP (x, 0), 1));
4604 /* expand_call should ensure this. */
4605 gcc_assert (!arg->locate.offset.var
4606 && arg->locate.size.var == 0
4607 && CONST_INT_P (size_rtx));
4609 if (arg->locate.offset.constant > i)
4610 {
4611 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4612 sibcall_failure = 1;
4613 }
4614 else if (arg->locate.offset.constant < i)
4615 {
4616 /* Use arg->locate.size.constant instead of size_rtx
4617 because we only care about the part of the argument
4618 on the stack. */
4619 if (i < (arg->locate.offset.constant
4620 + arg->locate.size.constant))
4621 sibcall_failure = 1;
4622 }
4623 else
4624 {
4625 /* Even though they appear to be at the same location,
4626 if part of the outgoing argument is in registers,
4627 they aren't really at the same location. Check for
4628 this by making sure that the incoming size is the
4629 same as the outgoing size. */
4630 if (arg->locate.size.constant != INTVAL (size_rtx))
4631 sibcall_failure = 1;
4632 }
4633 }
4634 }
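/* Editorial note, not part of the original source: as an example of
   the three checks above, if the incoming argument occupies bytes
   [I, I + INTVAL (size_rtx)) == [0, 16) and the outgoing argument is
   placed at offset 8, the ranges overlap, so the argument would be
   clobbered before it is fully copied and sibcall_failure is set.  */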
4636 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4637 parm_align, partial, reg, excess, argblock,
4638 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4639 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4641 /* Unless this is a partially-in-register argument, the argument is now
4642 in the stack.
4644 ??? Unlike the case above, in which we want the actual
4645 address of the data, so that we can load it directly into a
4646 register, here we want the address of the stack slot, so that
4647 it's properly aligned for word-by-word copying or something
4648 like that. It's not clear that this is always correct. */
4649 if (partial == 0)
4650 arg->value = arg->stack_slot;
4651 }
4653 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4654 {
4655 tree type = TREE_TYPE (arg->tree_value);
4656 arg->parallel_value
4657 = emit_group_load_into_temps (arg->reg, arg->value, type,
4658 int_size_in_bytes (type));
4659 }
4661 /* Mark all slots this store used. */
4662 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4663 && argblock && ! variable_size && arg->stack)
4664 for (i = lower_bound; i < upper_bound; i++)
4665 stack_usage_map[i] = 1;
4667 /* Once we have pushed something, pops can't safely
4668 be deferred during the rest of the arguments. */
4669 NO_DEFER_POP;
4671 /* Free any temporary slots made in processing this argument. Show
4672 that we might have taken the address of something and pushed that
4673 as an operand. */
4674 preserve_temp_slots (NULL_RTX);
4675 free_temp_slots ();
4676 pop_temp_slots ();
4678 return sibcall_failure;
4679 }
4681 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4683 bool
4684 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4685 const_tree type)
4686 {
4687 if (!type)
4688 return false;
4690 /* If the type has variable size... */
4691 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4692 return true;
4694 /* If the type is marked as addressable (it is required
4695 to be constructed into the stack)... */
4696 if (TREE_ADDRESSABLE (type))
4697 return true;
4699 return false;
4700 }
4702 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4703 takes trailing padding of a structure into account. */
4704 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4706 bool
4707 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, const_tree type)
4708 {
4709 if (!type)
4710 return false;
4712 /* If the type has variable size... */
4713 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4714 return true;
4716 /* If the type is marked as addressable (it is required
4717 to be constructed into the stack)... */
4718 if (TREE_ADDRESSABLE (type))
4719 return true;
4721 /* If the padding and mode of the type is such that a copy into
4722 a register would put it into the wrong part of the register. */
4723 if (mode == BLKmode
4724 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4725 && (FUNCTION_ARG_PADDING (mode, type)
4726 == (BYTES_BIG_ENDIAN ? upward : downward)))
4727 return true;
4729 return false;
4730 }
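/* Editorial note, not part of the original source: as an example of
   the padding test above, a 6-byte BLKmode structure with
   PARM_BOUNDARY == 32 has 6 % 4 != 0; if the target pads it toward
   the most-significant end (upward on BYTES_BIG_ENDIAN), loading it
   into a register would place the data in the wrong part of the
   register, so it must be passed on the stack.  */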