1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "libfuncs.h"
31 #include "function.h"
32 #include "regs.h"
33 #include "toplev.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "timevar.h"
37 #include "sbitmap.h"
38 #include "langhooks.h"
39 #include "target.h"
40 #include "cgraph.h"
41 #include "except.h"
43 /* Decide whether a function's arguments should be processed
44 from first to last or from last to first.
46 They should if the stack and args grow in opposite directions, but
47 only if we have push insns. */
49 #ifdef PUSH_ROUNDING
51 #ifndef PUSH_ARGS_REVERSED
52 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
53 #define PUSH_ARGS_REVERSED PUSH_ARGS
54 #endif
55 #endif
57 #endif
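/* For example, on a target where the stack grows downward but
   ARGS_GROW_DOWNWARD is not defined, the defined() test above is true,
   so the args are processed last-to-first whenever PUSH_ARGS is nonzero.  */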
59 #ifndef PUSH_ARGS_REVERSED
60 #define PUSH_ARGS_REVERSED 0
61 #endif
63 #ifndef STACK_POINTER_OFFSET
64 #define STACK_POINTER_OFFSET 0
65 #endif
67 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
68 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
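/* For example, a target whose PREFERRED_STACK_BOUNDARY is 64 bits gets a
   STACK_BYTES of 8.  */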
70 /* Data structure and subroutines used within expand_call. */
72 struct arg_data
74 /* Tree node for this argument. */
75 tree tree_value;
76 /* Mode for value; TYPE_MODE unless promoted. */
77 enum machine_mode mode;
78 /* Current RTL value for argument, or 0 if it isn't precomputed. */
79 rtx value;
80 /* Initially-computed RTL value for argument; only for const functions. */
81 rtx initial_value;
82 /* Register to pass this argument in, 0 if passed on stack, or a
83 PARALLEL if the arg is to be copied into multiple non-contiguous
84 registers. */
85 rtx reg;
86 /* Register to pass this argument in when generating tail call sequence.
87 This is not the same register as for normal calls on machines with
88 register windows. */
89 rtx tail_call_reg;
90 /* If REG was promoted from the actual mode of the argument expression,
91 indicates whether the promotion is sign- or zero-extended. */
92 int unsignedp;
93 /* Number of registers to use. 0 means put the whole arg in registers.
94 Also 0 if not passed in registers. */
95 int partial;
96 /* Nonzero if argument must be passed on stack.
97 Note that some arguments may be passed on the stack
98 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
99 pass_on_stack identifies arguments that *cannot* go in registers. */
100 int pass_on_stack;
101 /* Some fields packaged up for locate_and_pad_parm. */
102 struct locate_and_pad_arg_data locate;
103 /* Location on the stack at which parameter should be stored. The store
104 has already been done if STACK == VALUE. */
105 rtx stack;
106 /* Location on the stack of the start of this argument slot. This can
107 differ from STACK if this arg pads downward. This location is known
108 to be aligned to FUNCTION_ARG_BOUNDARY. */
109 rtx stack_slot;
110 /* Place that this stack area has been saved, if needed. */
111 rtx save_area;
112 /* If an argument's alignment does not permit direct copying into registers,
113 copy in smaller-sized pieces into pseudos. These are stored in a
114 block pointed to by this field. The next field says how many
115 word-sized pseudos we made. */
116 rtx *aligned_regs;
117 int n_aligned_regs;
120 /* A vector of one char per byte of stack space. A byte is nonzero if
121 the corresponding stack location has been used.
122 This vector is used to prevent a function call within an argument from
123 clobbering any stack already set up. */
124 static char *stack_usage_map;
126 /* Size of STACK_USAGE_MAP. */
127 static int highest_outgoing_arg_in_use;
129 /* A bitmap of virtual-incoming stack space. A bit is set if the tail call
130 argument for the corresponding stack location has already been stored into the stack.
131 This bitmap is used to prevent sibling call optimization when the function tries
132 to use its parent's incoming argument slots after they have already been
133 overwritten with tail call arguments. */
134 static sbitmap stored_args_map;
136 /* stack_arg_under_construction is nonzero when an argument may be
137 initialized with a constructor call (including a C function that
138 returns a BLKmode struct) and expand_call must take special action
139 to make sure the object being constructed does not overlap the
140 argument list for the constructor call. */
141 int stack_arg_under_construction;
143 static int calls_function PARAMS ((tree, int));
144 static int calls_function_1 PARAMS ((tree, int));
146 static void emit_call_1 PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
147 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
148 rtx, int, rtx, int,
149 CUMULATIVE_ARGS *));
150 static void precompute_register_parameters PARAMS ((int,
151 struct arg_data *,
152 int *));
153 static int store_one_arg PARAMS ((struct arg_data *, rtx, int, int,
154 int));
155 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
156 int));
157 static int finalize_must_preallocate PARAMS ((int, int,
158 struct arg_data *,
159 struct args_size *));
160 static void precompute_arguments PARAMS ((int, int,
161 struct arg_data *));
162 static int compute_argument_block_size PARAMS ((int,
163 struct args_size *,
164 int));
165 static void initialize_argument_information PARAMS ((int,
166 struct arg_data *,
167 struct args_size *,
168 int, tree, tree,
169 CUMULATIVE_ARGS *,
170 int, rtx *, int *,
171 int *, int *));
172 static void compute_argument_addresses PARAMS ((struct arg_data *,
173 rtx, int));
174 static rtx rtx_for_function_call PARAMS ((tree, tree));
175 static void load_register_parameters PARAMS ((struct arg_data *,
176 int, rtx *, int,
177 int, int *));
178 static rtx emit_library_call_value_1 PARAMS ((int, rtx, rtx,
179 enum libcall_type,
180 enum machine_mode,
181 int, va_list));
182 static int special_function_p PARAMS ((tree, int));
183 static rtx try_to_integrate PARAMS ((tree, tree, rtx,
184 int, tree, rtx));
185 static int check_sibcall_argument_overlap_1 PARAMS ((rtx));
186 static int check_sibcall_argument_overlap PARAMS ((rtx, struct arg_data *,
187 int));
189 static int combine_pending_stack_adjustment_and_call
190 PARAMS ((int, struct args_size *, int));
191 static tree fix_unsafe_tree PARAMS ((tree));
193 #ifdef REG_PARM_STACK_SPACE
194 static rtx save_fixed_argument_area PARAMS ((int, rtx, int *, int *));
195 static void restore_fixed_argument_area PARAMS ((rtx, rtx, int, int));
196 #endif
198 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
199 `alloca'.
201 If WHICH is 0, return 1 if EXP contains a call to any function.
202 Actually, we only need to return 1 if evaluating EXP would require pushing
203 arguments on the stack, but that is too difficult to compute, so we just
204 assume any function call might require the stack. */
206 static tree calls_function_save_exprs;
208 static int
209 calls_function (exp, which)
210 tree exp;
211 int which;
213 int val;
215 calls_function_save_exprs = 0;
216 val = calls_function_1 (exp, which);
217 calls_function_save_exprs = 0;
218 return val;
221 /* Recursive function to do the work of above function. */
223 static int
224 calls_function_1 (exp, which)
225 tree exp;
226 int which;
228 int i;
229 enum tree_code code = TREE_CODE (exp);
230 int class = TREE_CODE_CLASS (code);
231 int length = first_rtl_op (code);
233 /* If this code is language-specific, we don't know what it will do. */
234 if ((int) code >= NUM_TREE_CODES)
235 return 1;
237 switch (code)
239 case CALL_EXPR:
240 if (which == 0)
241 return 1;
242 else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
243 == FUNCTION_TYPE)
244 && (TYPE_RETURNS_STACK_DEPRESSED
245 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
246 return 1;
247 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
248 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
249 == FUNCTION_DECL)
250 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
252 & ECF_MAY_BE_ALLOCA))
253 return 1;
255 break;
257 case CONSTRUCTOR:
259 tree tem;
261 for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
262 if (calls_function_1 (TREE_VALUE (tem), which))
263 return 1;
266 return 0;
268 case SAVE_EXPR:
269 if (SAVE_EXPR_RTL (exp) != 0)
270 return 0;
271 if (value_member (exp, calls_function_save_exprs))
272 return 0;
273 calls_function_save_exprs = tree_cons (NULL_TREE, exp,
274 calls_function_save_exprs);
275 return (TREE_OPERAND (exp, 0) != 0
276 && calls_function_1 (TREE_OPERAND (exp, 0), which));
278 case BLOCK:
280 tree local;
281 tree subblock;
283 for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
284 if (DECL_INITIAL (local) != 0
285 && calls_function_1 (DECL_INITIAL (local), which))
286 return 1;
288 for (subblock = BLOCK_SUBBLOCKS (exp);
289 subblock;
290 subblock = TREE_CHAIN (subblock))
291 if (calls_function_1 (subblock, which))
292 return 1;
294 return 0;
296 case TREE_LIST:
297 for (; exp != 0; exp = TREE_CHAIN (exp))
298 if (calls_function_1 (TREE_VALUE (exp), which))
299 return 1;
300 return 0;
302 default:
303 break;
306 /* Only expressions, references, and blocks can contain calls. */
307 if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
308 return 0;
310 for (i = 0; i < length; i++)
311 if (TREE_OPERAND (exp, i) != 0
312 && calls_function_1 (TREE_OPERAND (exp, i), which))
313 return 1;
315 return 0;
318 /* Force FUNEXP into a form suitable for the address of a CALL,
319 and return that as an rtx. Also load the static chain register
320 if FNDECL is a nested function.
322 CALL_FUSAGE points to a variable holding the prospective
323 CALL_INSN_FUNCTION_USAGE information. */
326 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
327 rtx funexp;
328 tree fndecl;
329 rtx *call_fusage;
330 int reg_parm_seen;
331 int sibcallp;
333 rtx static_chain_value = 0;
335 funexp = protect_from_queue (funexp, 0);
337 if (fndecl != 0)
338 /* Get possible static chain value for nested function in C. */
339 static_chain_value = lookup_static_chain (fndecl);
341 /* Make a valid memory address and copy constants thru pseudo-regs,
342 but not for a constant address if -fno-function-cse. */
343 if (GET_CODE (funexp) != SYMBOL_REF)
344 /* If we are using registers for parameters, force the
345 function address into a register now. */
346 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
347 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
348 : memory_address (FUNCTION_MODE, funexp));
349 else if (! sibcallp)
351 #ifndef NO_FUNCTION_CSE
352 if (optimize && ! flag_no_function_cse)
353 #ifdef NO_RECURSIVE_FUNCTION_CSE
354 if (fndecl != current_function_decl)
355 #endif
356 funexp = force_reg (Pmode, funexp);
357 #endif
360 if (static_chain_value != 0)
362 emit_move_insn (static_chain_rtx, static_chain_value);
364 if (GET_CODE (static_chain_rtx) == REG)
365 use_reg (call_fusage, static_chain_rtx);
368 return funexp;
371 /* Generate instructions to call function FUNEXP,
372 and optionally pop the results.
373 The CALL_INSN is the first insn generated.
375 FNDECL is the declaration node of the function. This is given to the
376 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
378 FUNTYPE is the data type of the function. This is given to the macro
379 RETURN_POPS_ARGS to determine whether this function pops its own args.
380 We used to allow an identifier for library functions, but that doesn't
381 work when the return type is an aggregate type and the calling convention
382 says that the pointer to this aggregate is to be popped by the callee.
384 STACK_SIZE is the number of bytes of arguments on the stack,
385 ROUNDED_STACK_SIZE is that number rounded up to
386 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
387 both to put into the call insn and to generate explicit popping
388 code if necessary.
390 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
391 It is zero if this call doesn't want a structure value.
393 NEXT_ARG_REG is the rtx that results from executing
394 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
395 just after all the args have had their registers assigned.
396 This could be whatever you like, but normally it is the first
397 arg-register beyond those used for args in this call,
398 or 0 if all the arg-registers are used in this call.
399 It is passed on to `gen_call' so you can put this info in the call insn.
401 VALREG is a hard register in which a value is returned,
402 or 0 if the call does not return a value.
404 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
405 the args to this call were processed.
406 We restore `inhibit_defer_pop' to that value.
408 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
409 denote registers used by the called function. */
411 static void
412 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
413 struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
414 call_fusage, ecf_flags, args_so_far)
415 rtx funexp;
416 tree fndecl ATTRIBUTE_UNUSED;
417 tree funtype ATTRIBUTE_UNUSED;
418 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
419 HOST_WIDE_INT rounded_stack_size;
420 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
421 rtx next_arg_reg ATTRIBUTE_UNUSED;
422 rtx valreg;
423 int old_inhibit_defer_pop;
424 rtx call_fusage;
425 int ecf_flags;
426 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
428 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
429 rtx call_insn;
430 int already_popped = 0;
431 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
432 #if defined (HAVE_call) && defined (HAVE_call_value)
433 rtx struct_value_size_rtx;
434 struct_value_size_rtx = GEN_INT (struct_value_size);
435 #endif
437 #ifdef CALL_POPS_ARGS
438 n_popped += CALL_POPS_ARGS (* args_so_far);
439 #endif
441 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
442 and we don't want to load it into a register as an optimization,
443 because prepare_call_address already did it if it should be done. */
444 if (GET_CODE (funexp) != SYMBOL_REF)
445 funexp = memory_address (FUNCTION_MODE, funexp);
447 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
448 if ((ecf_flags & ECF_SIBCALL)
449 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
450 && (n_popped > 0 || stack_size == 0))
452 rtx n_pop = GEN_INT (n_popped);
453 rtx pat;
455 /* If this subroutine pops its own args, record that in the call insn
456 if possible, for the sake of frame pointer elimination. */
458 if (valreg)
459 pat = GEN_SIBCALL_VALUE_POP (valreg,
460 gen_rtx_MEM (FUNCTION_MODE, funexp),
461 rounded_stack_size_rtx, next_arg_reg,
462 n_pop);
463 else
464 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
465 rounded_stack_size_rtx, next_arg_reg, n_pop);
467 emit_call_insn (pat);
468 already_popped = 1;
470 else
471 #endif
473 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
474 /* If the target has "call" or "call_value" insns, then prefer them
475 if no arguments are actually popped. If the target does not have
476 "call" or "call_value" insns, then we must use the popping versions
477 even if the call has no arguments to pop. */
478 #if defined (HAVE_call) && defined (HAVE_call_value)
479 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
480 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
481 #else
482 if (HAVE_call_pop && HAVE_call_value_pop)
483 #endif
485 rtx n_pop = GEN_INT (n_popped);
486 rtx pat;
488 /* If this subroutine pops its own args, record that in the call insn
489 if possible, for the sake of frame pointer elimination. */
491 if (valreg)
492 pat = GEN_CALL_VALUE_POP (valreg,
493 gen_rtx_MEM (FUNCTION_MODE, funexp),
494 rounded_stack_size_rtx, next_arg_reg, n_pop);
495 else
496 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
497 rounded_stack_size_rtx, next_arg_reg, n_pop);
499 emit_call_insn (pat);
500 already_popped = 1;
502 else
503 #endif
505 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
506 if ((ecf_flags & ECF_SIBCALL)
507 && HAVE_sibcall && HAVE_sibcall_value)
509 if (valreg)
510 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
511 gen_rtx_MEM (FUNCTION_MODE, funexp),
512 rounded_stack_size_rtx,
513 next_arg_reg, NULL_RTX));
514 else
515 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
516 rounded_stack_size_rtx, next_arg_reg,
517 struct_value_size_rtx));
519 else
520 #endif
522 #if defined (HAVE_call) && defined (HAVE_call_value)
523 if (HAVE_call && HAVE_call_value)
525 if (valreg)
526 emit_call_insn (GEN_CALL_VALUE (valreg,
527 gen_rtx_MEM (FUNCTION_MODE, funexp),
528 rounded_stack_size_rtx, next_arg_reg,
529 NULL_RTX));
530 else
531 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
532 rounded_stack_size_rtx, next_arg_reg,
533 struct_value_size_rtx));
535 else
536 #endif
537 abort ();
539 /* Find the call we just emitted. */
540 call_insn = last_call_insn ();
542 /* Mark memory as used for "pure" function call. */
543 if (ecf_flags & ECF_PURE)
544 call_fusage
545 = gen_rtx_EXPR_LIST
546 (VOIDmode,
547 gen_rtx_USE (VOIDmode,
548 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
549 call_fusage);
551 /* Put the register usage information there. */
552 add_function_usage_to (call_insn, call_fusage);
554 /* If this is a const call, then set the insn's unchanging bit. */
555 if (ecf_flags & (ECF_CONST | ECF_PURE))
556 CONST_OR_PURE_CALL_P (call_insn) = 1;
558 /* If this call can't throw, attach a REG_EH_REGION reg note to that
559 effect. */
560 if (ecf_flags & ECF_NOTHROW)
561 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
562 REG_NOTES (call_insn));
563 else
564 note_eh_region_may_contain_throw ();
566 if (ecf_flags & ECF_NORETURN)
567 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
568 REG_NOTES (call_insn));
569 if (ecf_flags & ECF_ALWAYS_RETURN)
570 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
571 REG_NOTES (call_insn));
573 if (ecf_flags & ECF_RETURNS_TWICE)
575 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
576 REG_NOTES (call_insn));
577 current_function_calls_setjmp = 1;
580 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
582 /* Restore this now, so that we do defer pops for this call's args
583 if the context of the call as a whole permits. */
584 inhibit_defer_pop = old_inhibit_defer_pop;
586 if (n_popped > 0)
588 if (!already_popped)
589 CALL_INSN_FUNCTION_USAGE (call_insn)
590 = gen_rtx_EXPR_LIST (VOIDmode,
591 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
592 CALL_INSN_FUNCTION_USAGE (call_insn));
593 rounded_stack_size -= n_popped;
594 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
595 stack_pointer_delta -= n_popped;
598 if (!ACCUMULATE_OUTGOING_ARGS)
600 /* If returning from the subroutine does not automatically pop the args,
601 we need an instruction to pop them sooner or later.
602 Perhaps do it now; perhaps just record how much space to pop later.
604 If returning from the subroutine does pop the args, indicate that the
605 stack pointer will be changed. */
607 if (rounded_stack_size != 0)
609 if (ecf_flags & ECF_SP_DEPRESSED)
610 /* Just pretend we did the pop. */
611 stack_pointer_delta -= rounded_stack_size;
612 else if (flag_defer_pop && inhibit_defer_pop == 0
613 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
614 pending_stack_adjust += rounded_stack_size;
615 else
616 adjust_stack (rounded_stack_size_rtx);
619 /* When we accumulate outgoing args, we must avoid any stack manipulations.
620 Restore the stack pointer to its original value now. Usually
621 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
622 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
623 popping variants of functions exist as well.
625 ??? We may optimize similarly to defer_pop above, but it is
626 probably not worthwhile.
628 ??? It will be worthwhile to enable combine_stack_adjustments even for
629 such machines. */
630 else if (n_popped)
631 anti_adjust_stack (GEN_INT (n_popped));
634 /* Determine if the function identified by NAME and FNDECL is one with
635 special properties we wish to know about.
637 For example, if the function might return more than one time (setjmp), then
638 set RETURNS_TWICE to a nonzero value.
640 Similarly set LONGJMP if the function is in the longjmp family.
642 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
643 space from the stack such as alloca. */
645 static int
646 special_function_p (fndecl, flags)
647 tree fndecl;
648 int flags;
650 if (! (flags & ECF_MALLOC)
651 && fndecl && DECL_NAME (fndecl)
652 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
653 /* Exclude functions not at the file scope, or not `extern',
654 since they are not the magic functions we would otherwise
655 think they are. */
656 && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
658 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
659 const char *tname = name;
661 /* We assume that alloca will always be called by name. It
662 makes no sense to pass it as a pointer-to-function to
663 anything that does not understand its behavior. */
664 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
665 && name[0] == 'a'
666 && ! strcmp (name, "alloca"))
667 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
668 && name[0] == '_'
669 && ! strcmp (name, "__builtin_alloca"))))
670 flags |= ECF_MAY_BE_ALLOCA;
672 /* Disregard prefix _, __ or __x. */
673 if (name[0] == '_')
675 if (name[1] == '_' && name[2] == 'x')
676 tname += 3;
677 else if (name[1] == '_')
678 tname += 2;
679 else
680 tname += 1;
683 if (tname[0] == 's')
685 if ((tname[1] == 'e'
686 && (! strcmp (tname, "setjmp")
687 || ! strcmp (tname, "setjmp_syscall")))
688 || (tname[1] == 'i'
689 && ! strcmp (tname, "sigsetjmp"))
690 || (tname[1] == 'a'
691 && ! strcmp (tname, "savectx")))
692 flags |= ECF_RETURNS_TWICE;
694 if (tname[1] == 'i'
695 && ! strcmp (tname, "siglongjmp"))
696 flags |= ECF_LONGJMP;
698 else if ((tname[0] == 'q' && tname[1] == 's'
699 && ! strcmp (tname, "qsetjmp"))
700 || (tname[0] == 'v' && tname[1] == 'f'
701 && ! strcmp (tname, "vfork")))
702 flags |= ECF_RETURNS_TWICE;
704 else if (tname[0] == 'l' && tname[1] == 'o'
705 && ! strcmp (tname, "longjmp"))
706 flags |= ECF_LONGJMP;
708 else if ((tname[0] == 'f' && tname[1] == 'o'
709 && ! strcmp (tname, "fork"))
710 /* Linux specific: __clone. check NAME to insist on the
711 leading underscores, to avoid polluting the ISO / POSIX
712 namespace. */
713 || (name[0] == '_' && name[1] == '_'
714 && ! strcmp (tname, "clone"))
715 || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
716 && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
717 && (tname[5] == '\0'
718 || ((tname[5] == 'p' || tname[5] == 'e')
719 && tname[6] == '\0'))))
720 flags |= ECF_FORK_OR_EXEC;
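/* The exec test above accepts execl, execv, execle, execlp, execve and
   execvp, but not, say, execvpe, since only a single trailing 'p' or 'e'
   is allowed after the 'l' or 'v'.  */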
722 return flags;
725 /* Return nonzero when the tree represents a call to a setjmp-like function, i.e. one that may return more than once. */
728 setjmp_call_p (fndecl)
729 tree fndecl;
731 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
734 /* Return true when EXP contains an alloca call. */
735 bool
736 alloca_call_p (exp)
737 tree exp;
739 if (TREE_CODE (exp) == CALL_EXPR
740 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
741 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
742 == FUNCTION_DECL)
743 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
744 0) & ECF_MAY_BE_ALLOCA))
745 return true;
746 return false;
749 /* Detect flags (function attributes) from the function decl or type node. */
752 flags_from_decl_or_type (exp)
753 tree exp;
755 int flags = 0;
756 tree type = exp;
758 if (DECL_P (exp))
760 struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
761 type = TREE_TYPE (exp);
763 if (i)
765 if (i->pure_function)
766 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
767 if (i->const_function)
768 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
771 /* The function exp may have the `malloc' attribute. */
772 if (DECL_IS_MALLOC (exp))
773 flags |= ECF_MALLOC;
775 /* The function exp may have the `pure' attribute. */
776 if (DECL_IS_PURE (exp))
777 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
779 if (TREE_NOTHROW (exp))
780 flags |= ECF_NOTHROW;
783 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
784 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
786 if (TREE_THIS_VOLATILE (exp))
787 flags |= ECF_NORETURN;
789 /* Mark if the function returns with the stack pointer depressed. We
790 cannot consider it pure or constant in that case. */
791 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
793 flags |= ECF_SP_DEPRESSED;
794 flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
797 return flags;
800 /* Precompute all register parameters as described by ARGS, storing values
801 into fields within the ARGS array.
803 NUM_ACTUALS indicates the total number elements in the ARGS array.
805 Set REG_PARM_SEEN if we encounter a register parameter. */
807 static void
808 precompute_register_parameters (num_actuals, args, reg_parm_seen)
809 int num_actuals;
810 struct arg_data *args;
811 int *reg_parm_seen;
813 int i;
815 *reg_parm_seen = 0;
817 for (i = 0; i < num_actuals; i++)
818 if (args[i].reg != 0 && ! args[i].pass_on_stack)
820 *reg_parm_seen = 1;
822 if (args[i].value == 0)
824 push_temp_slots ();
825 args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
826 VOIDmode, 0);
827 preserve_temp_slots (args[i].value);
828 pop_temp_slots ();
830 /* ANSI doesn't require a sequence point here,
831 but PCC has one, so this will avoid some problems. */
832 emit_queue ();
835 /* If the value is a non-legitimate constant, force it into a
836 pseudo now. TLS symbols sometimes need a call to resolve. */
837 if (CONSTANT_P (args[i].value)
838 && !LEGITIMATE_CONSTANT_P (args[i].value))
839 args[i].value = force_reg (args[i].mode, args[i].value);
841 /* If we are to promote the function arg to a wider mode,
842 do it now. */
844 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
845 args[i].value
846 = convert_modes (args[i].mode,
847 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
848 args[i].value, args[i].unsignedp);
850 /* If the value is expensive, and we are inside an appropriately
851 short loop, put the value into a pseudo and then put the pseudo
852 into the hard reg.
854 For small register classes, also do this if this call uses
855 register parameters. This is to avoid reload conflicts while
856 loading the parameters registers. */
858 if ((! (GET_CODE (args[i].value) == REG
859 || (GET_CODE (args[i].value) == SUBREG
860 && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
861 && args[i].mode != BLKmode
862 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
863 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
864 || preserve_subexpressions_p ()))
865 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
869 #ifdef REG_PARM_STACK_SPACE
871 /* The argument list is the property of the called routine and it
872 may clobber it. If the fixed area has been used for previous
873 parameters, we must save and restore it. */
875 static rtx
876 save_fixed_argument_area (reg_parm_stack_space, argblock,
877 low_to_save, high_to_save)
878 int reg_parm_stack_space;
879 rtx argblock;
880 int *low_to_save;
881 int *high_to_save;
883 int low;
884 int high;
886 /* Compute the boundary of the area that needs to be saved, if any. */
887 high = reg_parm_stack_space;
888 #ifdef ARGS_GROW_DOWNWARD
889 high += 1;
890 #endif
891 if (high > highest_outgoing_arg_in_use)
892 high = highest_outgoing_arg_in_use;
894 for (low = 0; low < high; low++)
895 if (stack_usage_map[low] != 0)
897 int num_to_save;
898 enum machine_mode save_mode;
899 int delta;
900 rtx stack_area;
901 rtx save_area;
903 while (stack_usage_map[--high] == 0)
906 *low_to_save = low;
907 *high_to_save = high;
909 num_to_save = high - low + 1;
910 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
912 /* If we don't have the required alignment, must do this
913 in BLKmode. */
914 if ((low & (MIN (GET_MODE_SIZE (save_mode),
915 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
916 save_mode = BLKmode;
918 #ifdef ARGS_GROW_DOWNWARD
919 delta = -high;
920 #else
921 delta = low;
922 #endif
923 stack_area = gen_rtx_MEM (save_mode,
924 memory_address (save_mode,
925 plus_constant (argblock,
926 delta)));
928 set_mem_align (stack_area, PARM_BOUNDARY);
929 if (save_mode == BLKmode)
931 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
932 emit_block_move (validize_mem (save_area), stack_area,
933 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
935 else
937 save_area = gen_reg_rtx (save_mode);
938 emit_move_insn (save_area, stack_area);
941 return save_area;
944 return NULL_RTX;
947 static void
948 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
949 rtx save_area;
950 rtx argblock;
951 int high_to_save;
952 int low_to_save;
954 enum machine_mode save_mode = GET_MODE (save_area);
955 int delta;
956 rtx stack_area;
958 #ifdef ARGS_GROW_DOWNWARD
959 delta = -high_to_save;
960 #else
961 delta = low_to_save;
962 #endif
963 stack_area = gen_rtx_MEM (save_mode,
964 memory_address (save_mode,
965 plus_constant (argblock, delta)));
966 set_mem_align (stack_area, PARM_BOUNDARY);
968 if (save_mode != BLKmode)
969 emit_move_insn (stack_area, save_area);
970 else
971 emit_block_move (stack_area, validize_mem (save_area),
972 GEN_INT (high_to_save - low_to_save + 1),
973 BLOCK_OP_CALL_PARM);
975 #endif /* REG_PARM_STACK_SPACE */
977 /* If any elements in ARGS refer to parameters that are to be passed in
978 registers, but not in memory, and whose alignment does not permit a
979 direct copy into registers, copy the values into a group of pseudos
980 which we will later copy into the appropriate hard registers.
982 Pseudos for each unaligned argument will be stored into the array
983 args[argnum].aligned_regs. The caller is responsible for deallocating
984 the aligned_regs array if it is nonzero. */
986 static void
987 store_unaligned_arguments_into_pseudos (args, num_actuals)
988 struct arg_data *args;
989 int num_actuals;
991 int i, j;
993 for (i = 0; i < num_actuals; i++)
994 if (args[i].reg != 0 && ! args[i].pass_on_stack
995 && args[i].mode == BLKmode
996 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
997 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
999 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1000 int big_endian_correction = 0;
1002 args[i].n_aligned_regs
1003 = args[i].partial ? args[i].partial
1004 : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1006 args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
1007 * args[i].n_aligned_regs);
1009 /* Structures smaller than a word are aligned to the least
1010 significant byte (to the right). On a BYTES_BIG_ENDIAN machine,
1011 this means we must skip the empty high order bytes when
1012 calculating the bit offset. */
1013 if (BYTES_BIG_ENDIAN
1014 && bytes < UNITS_PER_WORD)
1015 big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));
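/* For example, a 3-byte structure on a 32-bit big-endian target gets a
   correction of 32 - 24 = 8 bits, skipping the one empty high-order byte
   of the word.  */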
1017 for (j = 0; j < args[i].n_aligned_regs; j++)
1019 rtx reg = gen_reg_rtx (word_mode);
1020 rtx word = operand_subword_force (args[i].value, j, BLKmode);
1021 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1023 args[i].aligned_regs[j] = reg;
1025 /* There is no need to restrict this code to loading items
1026 in TYPE_ALIGN sized hunks. The bitfield instructions can
1027 load up entire word sized registers efficiently.
1029 ??? This may not be needed anymore.
1030 We used to emit a clobber here but that doesn't let later
1031 passes optimize the instructions we emit. By storing 0 into
1032 the register, later passes know that the first AND to zero out the
1033 bitfield being set in the register is unnecessary. The store
1034 of 0 will be deleted as will at least the first AND. */
1036 emit_move_insn (reg, const0_rtx);
1038 bytes -= bitsize / BITS_PER_UNIT;
1039 store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1040 extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1041 word_mode, word_mode,
1042 BITS_PER_WORD),
1043 BITS_PER_WORD);
1048 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1049 ACTPARMS.
1051 NUM_ACTUALS is the total number of parameters.
1053 N_NAMED_ARGS is the total number of named arguments.
1055 FNDECL is the tree node for the target of this call (if known).
1057 ARGS_SO_FAR holds state needed by the target to know where to place
1058 the next argument.
1060 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1061 for arguments which are passed in registers.
1063 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1064 and may be modified by this routine.
1066 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1067 flags which may be modified by this routine. */
1069 static void
1070 initialize_argument_information (num_actuals, args, args_size, n_named_args,
1071 actparms, fndecl, args_so_far,
1072 reg_parm_stack_space, old_stack_level,
1073 old_pending_adj, must_preallocate,
1074 ecf_flags)
1075 int num_actuals ATTRIBUTE_UNUSED;
1076 struct arg_data *args;
1077 struct args_size *args_size;
1078 int n_named_args ATTRIBUTE_UNUSED;
1079 tree actparms;
1080 tree fndecl;
1081 CUMULATIVE_ARGS *args_so_far;
1082 int reg_parm_stack_space;
1083 rtx *old_stack_level;
1084 int *old_pending_adj;
1085 int *must_preallocate;
1086 int *ecf_flags;
1088 /* 1 if scanning parms front to back, -1 if scanning back to front. */
1089 int inc;
1091 /* Count arg position in order args appear. */
1092 int argpos;
1094 int i;
1095 tree p;
1097 args_size->constant = 0;
1098 args_size->var = 0;
1100 /* In this loop, we consider args in the order they are written.
1101 We fill up ARGS from the front or from the back if necessary
1102 so that in any case the first arg to be pushed ends up at the front. */
1104 if (PUSH_ARGS_REVERSED)
1106 i = num_actuals - 1, inc = -1;
1107 /* In this case, must reverse order of args
1108 so that we compute and push the last arg first. */
1110 else
1112 i = 0, inc = 1;
1115 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
1116 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1118 tree type = TREE_TYPE (TREE_VALUE (p));
1119 int unsignedp;
1120 enum machine_mode mode;
1122 args[i].tree_value = TREE_VALUE (p);
1124 /* Replace erroneous argument with constant zero. */
1125 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1126 args[i].tree_value = integer_zero_node, type = integer_type_node;
1128 /* If TYPE is a transparent union, pass things the way we would
1129 pass the first field of the union. We have already verified that
1130 the modes are the same. */
1131 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1132 type = TREE_TYPE (TYPE_FIELDS (type));
1134 /* Decide where to pass this arg.
1136 args[i].reg is nonzero if all or part is passed in registers.
1138 args[i].partial is nonzero if part but not all is passed in registers,
1139 and the exact value says how many words are passed in registers.
1141 args[i].pass_on_stack is nonzero if the argument must at least be
1142 computed on the stack. It may then be loaded back into registers
1143 if args[i].reg is nonzero.
1145 These decisions are driven by the FUNCTION_... macros and must agree
1146 with those made by function.c. */
1148 /* See if this argument should be passed by invisible reference. */
1149 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
1150 || TREE_ADDRESSABLE (type)
1151 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1152 || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1153 type, argpos < n_named_args)
1154 #endif
1157 /* If we're compiling a thunk, pass through invisible
1158 references instead of making a copy. */
1159 if (current_function_is_thunk
1160 #ifdef FUNCTION_ARG_CALLEE_COPIES
1161 || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1162 type, argpos < n_named_args)
1163 /* If it's in a register, we must make a copy of it too. */
1164 /* ??? Is this a sufficient test? Is there a better one? */
1165 && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1166 && REG_P (DECL_RTL (args[i].tree_value)))
1167 && ! TREE_ADDRESSABLE (type))
1168 #endif
1171 /* C++ uses a TARGET_EXPR to indicate that we want to make a
1172 new object from the argument. If we are passing by
1173 invisible reference, the callee will do that for us, so we
1174 can strip off the TARGET_EXPR. This is not always safe,
1175 but it is safe in the only case where this is a useful
1176 optimization; namely, when the argument is a plain object.
1177 In that case, the frontend is just asking the backend to
1178 make a bitwise copy of the argument. */
1180 if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1181 && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1182 && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1183 args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1185 args[i].tree_value = build1 (ADDR_EXPR,
1186 build_pointer_type (type),
1187 args[i].tree_value);
1188 type = build_pointer_type (type);
1190 else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1192 /* In the V3 C++ ABI, parameters are destroyed in the caller.
1193 We implement this by passing the address of the temporary
1194 rather than expanding it into another allocated slot. */
1195 args[i].tree_value = build1 (ADDR_EXPR,
1196 build_pointer_type (type),
1197 args[i].tree_value);
1198 type = build_pointer_type (type);
1200 else
1202 /* We make a copy of the object and pass the address to the
1203 function being called. */
1204 rtx copy;
1206 if (!COMPLETE_TYPE_P (type)
1207 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1208 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1209 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1210 STACK_CHECK_MAX_VAR_SIZE))))
1212 /* This is a variable-sized object. Make space on the stack
1213 for it. */
1214 rtx size_rtx = expr_size (TREE_VALUE (p));
1216 if (*old_stack_level == 0)
1218 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1219 *old_pending_adj = pending_stack_adjust;
1220 pending_stack_adjust = 0;
1223 copy = gen_rtx_MEM (BLKmode,
1224 allocate_dynamic_stack_space
1225 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1226 set_mem_attributes (copy, type, 1);
1228 else
1229 copy = assign_temp (type, 0, 1, 0);
1231 store_expr (args[i].tree_value, copy, 0);
1232 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1234 args[i].tree_value = build1 (ADDR_EXPR,
1235 build_pointer_type (type),
1236 make_tree (type, copy));
1237 type = build_pointer_type (type);
1241 mode = TYPE_MODE (type);
1242 unsignedp = TREE_UNSIGNED (type);
1244 #ifdef PROMOTE_FUNCTION_ARGS
1245 mode = promote_mode (type, mode, &unsignedp, 1);
1246 #endif
1248 args[i].unsignedp = unsignedp;
1249 args[i].mode = mode;
1251 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1252 argpos < n_named_args);
1253 #ifdef FUNCTION_INCOMING_ARG
1254 /* If this is a sibling call and the machine has register windows, the
1255 register window has to be unwound before calling the routine, so
1256 arguments have to go into the incoming registers. */
1257 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1258 argpos < n_named_args);
1259 #else
1260 args[i].tail_call_reg = args[i].reg;
1261 #endif
1263 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1264 if (args[i].reg)
1265 args[i].partial
1266 = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1267 argpos < n_named_args);
1268 #endif
1270 args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1272 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1273 it means that we are to pass this arg in the register(s) designated
1274 by the PARALLEL, but also to pass it in the stack. */
1275 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1276 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1277 args[i].pass_on_stack = 1;
1279 /* If this is an addressable type, we must preallocate the stack
1280 since we must evaluate the object into its final location.
1282 If this is to be passed in both registers and the stack, it is simpler
1283 to preallocate. */
1284 if (TREE_ADDRESSABLE (type)
1285 || (args[i].pass_on_stack && args[i].reg != 0))
1286 *must_preallocate = 1;
1288 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1289 we cannot consider this function call constant. */
1290 if (TREE_ADDRESSABLE (type))
1291 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1293 /* Compute the stack-size of this argument. */
1294 if (args[i].reg == 0 || args[i].partial != 0
1295 || reg_parm_stack_space > 0
1296 || args[i].pass_on_stack)
1297 locate_and_pad_parm (mode, type,
1298 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1300 #else
1301 args[i].reg != 0,
1302 #endif
1303 args[i].pass_on_stack ? 0 : args[i].partial,
1304 fndecl, args_size, &args[i].locate);
1306 /* Update ARGS_SIZE, the total stack space for args so far. */
1308 args_size->constant += args[i].locate.size.constant;
1309 if (args[i].locate.size.var)
1310 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1312 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1313 have been used, etc. */
1315 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1316 argpos < n_named_args);
1320 /* Update ARGS_SIZE to contain the total size for the argument block.
1321 Return the original constant component of the argument block's size.
1323 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1324 for arguments passed in registers. */
1326 static int
1327 compute_argument_block_size (reg_parm_stack_space, args_size,
1328 preferred_stack_boundary)
1329 int reg_parm_stack_space;
1330 struct args_size *args_size;
1331 int preferred_stack_boundary ATTRIBUTE_UNUSED;
1333 int unadjusted_args_size = args_size->constant;
1335 /* For accumulate outgoing args mode we don't need to align, since the frame
1336 will be already aligned. Align to STACK_BOUNDARY in order to prevent
1337 backends from generating misaligned frame sizes. */
1338 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1339 preferred_stack_boundary = STACK_BOUNDARY;
1341 /* Compute the actual size of the argument block required. The variable
1342 and constant sizes must be combined, the size may have to be rounded,
1343 and there may be a minimum required size. */
1345 if (args_size->var)
1347 args_size->var = ARGS_SIZE_TREE (*args_size);
1348 args_size->constant = 0;
1350 preferred_stack_boundary /= BITS_PER_UNIT;
1351 if (preferred_stack_boundary > 1)
1353 /* We don't handle this case yet. To handle it correctly we have
1354 to add the delta, round and subtract the delta.
1355 Currently no machine description requires this support. */
1356 if (stack_pointer_delta & (preferred_stack_boundary - 1))
1357 abort ();
1358 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1361 if (reg_parm_stack_space > 0)
1363 args_size->var
1364 = size_binop (MAX_EXPR, args_size->var,
1365 ssize_int (reg_parm_stack_space));
1367 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1368 /* The area corresponding to register parameters is not to count in
1369 the size of the block we need. So make the adjustment. */
1370 args_size->var
1371 = size_binop (MINUS_EXPR, args_size->var,
1372 ssize_int (reg_parm_stack_space));
1373 #endif
1376 else
1378 preferred_stack_boundary /= BITS_PER_UNIT;
1379 if (preferred_stack_boundary < 1)
1380 preferred_stack_boundary = 1;
1381 args_size->constant = (((args_size->constant
1382 + stack_pointer_delta
1383 + preferred_stack_boundary - 1)
1384 / preferred_stack_boundary
1385 * preferred_stack_boundary)
1386 - stack_pointer_delta);
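/* For example, with a 16-byte boundary, a stack_pointer_delta of 4 and a
   constant size of 20, the sum 24 rounds up to 32 and the constant becomes
   28, so delta plus block size is again a multiple of 16.  */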
1388 args_size->constant = MAX (args_size->constant,
1389 reg_parm_stack_space);
1391 #ifdef MAYBE_REG_PARM_STACK_SPACE
1392 if (reg_parm_stack_space == 0)
1393 args_size->constant = 0;
1394 #endif
1396 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1397 args_size->constant -= reg_parm_stack_space;
1398 #endif
1400 return unadjusted_args_size;
1403 /* Precompute parameters as needed for a function call.
1405 FLAGS is mask of ECF_* constants.
1407 NUM_ACTUALS is the number of arguments.
1409 ARGS is an array containing information for each argument; this
1410 routine fills in the INITIAL_VALUE and VALUE fields for each
1411 precomputed argument. */
1413 static void
1414 precompute_arguments (flags, num_actuals, args)
1415 int flags;
1416 int num_actuals;
1417 struct arg_data *args;
1419 int i;
1421 /* If this function call is cse'able, precompute all the parameters.
1422 Note that if the parameter is constructed into a temporary, this will
1423 cause an additional copy because the parameter will be constructed
1424 into a temporary location and then copied into the outgoing arguments.
1425 If a parameter contains a call to alloca and this function uses the
1426 stack, precompute the parameter. */
1428 /* If we preallocated the stack space, and some arguments must be passed
1429 on the stack, then we must precompute any parameter which contains a
1430 function call which will store arguments on the stack.
1431 Otherwise, evaluating the parameter may clobber previous parameters
1432 which have already been stored into the stack. (we have code to avoid
1433 such case by saving the outgoing stack arguments, but it results in
1434 worse code) */
1436 for (i = 0; i < num_actuals; i++)
1437 if ((flags & ECF_LIBCALL_BLOCK)
1438 || calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1440 enum machine_mode mode;
1442 /* If this is an addressable type, we cannot pre-evaluate it. */
1443 if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1444 abort ();
1446 args[i].value
1447 = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1449 /* ANSI doesn't require a sequence point here,
1450 but PCC has one, so this will avoid some problems. */
1451 emit_queue ();
1453 args[i].initial_value = args[i].value
1454 = protect_from_queue (args[i].value, 0);
1456 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1457 if (mode != args[i].mode)
1459 args[i].value
1460 = convert_modes (args[i].mode, mode,
1461 args[i].value, args[i].unsignedp);
1462 #ifdef PROMOTE_FOR_CALL_ONLY
1463 /* CSE will replace this only if it contains args[i].value
1464 pseudo, so convert it down to the declared mode using
1465 a SUBREG. */
1466 if (GET_CODE (args[i].value) == REG
1467 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1469 args[i].initial_value
1470 = gen_lowpart_SUBREG (mode, args[i].value);
1471 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1472 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1473 args[i].unsignedp);
1475 #endif
1480 /* Given the current state of MUST_PREALLOCATE and information about
1481 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1482 compute and return the final value for MUST_PREALLOCATE. */
1484 static int
1485 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1486 int must_preallocate;
1487 int num_actuals;
1488 struct arg_data *args;
1489 struct args_size *args_size;
1491 /* See if we have or want to preallocate stack space.
1493 If we would have to push a partially-in-regs parm
1494 before other stack parms, preallocate stack space instead.
1496 If the size of some parm is not a multiple of the required stack
1497 alignment, we must preallocate.
1499 If the total size of arguments that would otherwise create a copy in
1500 a temporary (such as a CALL) is more than half the total argument list
1501 size, preallocation is faster.
1503 Another reason to preallocate is if we have a machine (like the m88k)
1504 where stack alignment is required to be maintained between every
1505 pair of insns, not just when the call is made. However, we assume here
1506 that such machines either do not have push insns (and hence preallocation
1507 would occur anyway) or the problem is taken care of with
1508 PUSH_ROUNDING. */
1510 if (! must_preallocate)
1512 int partial_seen = 0;
1513 int copy_to_evaluate_size = 0;
1514 int i;
1516 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1518 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1519 partial_seen = 1;
1520 else if (partial_seen && args[i].reg == 0)
1521 must_preallocate = 1;
1523 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1524 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1525 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1526 || TREE_CODE (args[i].tree_value) == COND_EXPR
1527 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1528 copy_to_evaluate_size
1529 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1532 if (copy_to_evaluate_size * 2 >= args_size->constant
1533 && args_size->constant > 0)
1534 must_preallocate = 1;
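/* For example, if 40 of 64 argument bytes would otherwise be built in
   temporaries and then copied, 80 >= 64 and we preallocate.  */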
1536 return must_preallocate;
1539 /* If we preallocated stack space, compute the address of each argument
1540 and store it into the ARGS array.
1542 We need not ensure it is a valid memory address here; it will be
1543 validized when it is used.
1545 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1547 static void
1548 compute_argument_addresses (args, argblock, num_actuals)
1549 struct arg_data *args;
1550 rtx argblock;
1551 int num_actuals;
1553 if (argblock)
1555 rtx arg_reg = argblock;
1556 int i, arg_offset = 0;
1558 if (GET_CODE (argblock) == PLUS)
1559 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1561 for (i = 0; i < num_actuals; i++)
1563 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1564 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1565 rtx addr;
1567 /* Skip this parm if it will not be passed on the stack. */
1568 if (! args[i].pass_on_stack && args[i].reg != 0)
1569 continue;
1571 if (GET_CODE (offset) == CONST_INT)
1572 addr = plus_constant (arg_reg, INTVAL (offset));
1573 else
1574 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1576 addr = plus_constant (addr, arg_offset);
1577 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1578 set_mem_align (args[i].stack, PARM_BOUNDARY);
1579 set_mem_attributes (args[i].stack,
1580 TREE_TYPE (args[i].tree_value), 1);
1582 if (GET_CODE (slot_offset) == CONST_INT)
1583 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1584 else
1585 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1587 addr = plus_constant (addr, arg_offset);
1588 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1589 set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1590 set_mem_attributes (args[i].stack_slot,
1591 TREE_TYPE (args[i].tree_value), 1);
1593 /* Function incoming arguments may overlap with sibling call
1594 outgoing arguments and we cannot allow reordering of reads
1595 from function arguments with stores to outgoing arguments
1596 of sibling calls. */
1597 set_mem_alias_set (args[i].stack, 0);
1598 set_mem_alias_set (args[i].stack_slot, 0);
1603 /* Given a FNDECL and ADDR, return an rtx suitable for use as a target address
1604 in a call instruction.
1606 FNDECL is the tree node for the target function. For an indirect call
1607 FNDECL will be NULL_TREE.
1609 ADDR is the operand 0 of CALL_EXPR for this call. */
1611 static rtx
1612 rtx_for_function_call (fndecl, addr)
1613 tree fndecl;
1614 tree addr;
1616 rtx funexp;
1618 /* Get the function to call, in the form of RTL. */
1619 if (fndecl)
1621 /* If this is the first use of the function, see if we need to
1622 make an external definition for it. */
1623 if (! TREE_USED (fndecl))
1625 assemble_external (fndecl);
1626 TREE_USED (fndecl) = 1;
1629 /* Get a SYMBOL_REF rtx for the function address. */
1630 funexp = XEXP (DECL_RTL (fndecl), 0);
1632 else
1633 /* Generate an rtx (probably a pseudo-register) for the address. */
1635 push_temp_slots ();
1636 funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
1637 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1638 emit_queue ();
1640 return funexp;
1643 /* Do the register loads required for any wholly-register parms or any
1644 parms which are passed both on the stack and in a register. Their
1645 expressions were already evaluated.
1647 Mark all register-parms as living through the call, putting these USE
1648 insns in the CALL_INSN_FUNCTION_USAGE field.
1650 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1651 checking, setting *SIBCALL_FAILURE if appropriate. */
1653 static void
1654 load_register_parameters (args, num_actuals, call_fusage, flags,
1655 is_sibcall, sibcall_failure)
1656 struct arg_data *args;
1657 int num_actuals;
1658 rtx *call_fusage;
1659 int flags;
1660 int is_sibcall;
1661 int *sibcall_failure;
1663 int i, j;
1665 #ifdef LOAD_ARGS_REVERSED
1666 for (i = num_actuals - 1; i >= 0; i--)
1667 #else
1668 for (i = 0; i < num_actuals; i++)
1669 #endif
1671 rtx reg = ((flags & ECF_SIBCALL)
1672 ? args[i].tail_call_reg : args[i].reg);
1673 int partial = args[i].partial;
1674 int nregs;
1676 if (reg)
1678 rtx before_arg = get_last_insn ();
1679 /* Set to non-negative if we must move a word at a time, even if just
1680 one word (e.g., partial == 1 && mode == DFmode). Set to -1 if
1681 we just use a normal move insn. This value can be zero if the
1682 argument is a zero size structure with no fields. */
1683 nregs = (partial ? partial
1684 : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1685 ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1686 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1687 : -1));
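/* For example, a 10-byte BLKmode argument on a target with 4-byte words
   gives nregs == 3, while a non-BLKmode argument passed entirely in
   registers gives nregs == -1 and is handled by a single move below.  */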
1689 /* Handle calls that pass values in multiple non-contiguous
1690 locations. The Irix 6 ABI has examples of this. */
1692 if (GET_CODE (reg) == PARALLEL)
1693 emit_group_load (reg, args[i].value,
1694 int_size_in_bytes (TREE_TYPE (args[i].tree_value)));
1696 /* If simple case, just do move. If normal partial, store_one_arg
1697 has already loaded the register for us. In all other cases,
1698 load the register(s) from memory. */
1700 else if (nregs == -1)
1701 emit_move_insn (reg, args[i].value);
1703 /* If we have pre-computed the values to put in the registers in
1704 the case of non-aligned structures, copy them in now. */
1706 else if (args[i].n_aligned_regs != 0)
1707 for (j = 0; j < args[i].n_aligned_regs; j++)
1708 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1709 args[i].aligned_regs[j]);
1711 else if (partial == 0 || args[i].pass_on_stack)
1712 move_block_to_reg (REGNO (reg),
1713 validize_mem (args[i].value), nregs,
1714 args[i].mode);
1716 /* When a parameter is a block, and perhaps in other cases, it is
1717 possible that it did a load from an argument slot that was
1718 already clobbered. */
1719 if (is_sibcall
1720 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1721 *sibcall_failure = 1;
1723 /* Handle calls that pass values in multiple non-contiguous
1724 locations. The Irix 6 ABI has examples of this. */
1725 if (GET_CODE (reg) == PARALLEL)
1726 use_group_regs (call_fusage, reg);
1727 else if (nregs == -1)
1728 use_reg (call_fusage, reg);
1729 else
1730 use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1735 /* Try to integrate function. See expand_inline_function for documentation
1736 about the parameters. */
1738 static rtx
1739 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1740 tree fndecl;
1741 tree actparms;
1742 rtx target;
1743 int ignore;
1744 tree type;
1745 rtx structure_value_addr;
1747 rtx temp;
1748 rtx before_call;
1749 int i;
1750 rtx old_stack_level = 0;
1751 int reg_parm_stack_space = 0;
1753 #ifdef REG_PARM_STACK_SPACE
1754 #ifdef MAYBE_REG_PARM_STACK_SPACE
1755 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1756 #else
1757 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1758 #endif
1759 #endif
1761 before_call = get_last_insn ();
1763 timevar_push (TV_INTEGRATION);
1765 temp = expand_inline_function (fndecl, actparms, target,
1766 ignore, type,
1767 structure_value_addr);
1769 timevar_pop (TV_INTEGRATION);
1771 /* If inlining succeeded, return. */
1772 if (temp != (rtx) (size_t) - 1)
1774 if (ACCUMULATE_OUTGOING_ARGS)
1776 /* If the outgoing argument list must be preserved, push
1777 the stack before executing the inlined function if it
1778 makes any calls. */
1780 i = reg_parm_stack_space;
1781 if (i > highest_outgoing_arg_in_use)
1782 i = highest_outgoing_arg_in_use;
1783 while (--i >= 0 && stack_usage_map[i] == 0)
1786 if (stack_arg_under_construction || i >= 0)
1788 rtx first_insn
1789 = before_call ? NEXT_INSN (before_call) : get_insns ();
1790 rtx insn = NULL_RTX, seq;
1792 /* Look for a call in the inline function code.
1793 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1794 nonzero then there is a call and it is not necessary
1795 to scan the insns. */
1797 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1798 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1799 if (GET_CODE (insn) == CALL_INSN)
1800 break;
1802 if (insn)
1804 /* Reserve enough stack space so that the largest
1805 argument list of any function call in the inline
1806 function does not overlap the argument list being
1807 evaluated. This is usually an overestimate because
1808 allocate_dynamic_stack_space reserves space for an
1809 outgoing argument list in addition to the requested
1810 space, but there is no way to ask for stack space such
1811 that an argument list of a certain length can be
1812 safely constructed.
1814 Add the stack space reserved for register arguments, if
1815 any, in the inline function. What is really needed is the
1816 largest value of reg_parm_stack_space in the inline
1817 function, but that is not available. Using the current
1818 value of reg_parm_stack_space is wrong, but gives
1819 correct results on all supported machines. */
1821 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1822 + reg_parm_stack_space);
1824 start_sequence ();
1825 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1826 allocate_dynamic_stack_space (GEN_INT (adjust),
1827 NULL_RTX, BITS_PER_UNIT);
1828 seq = get_insns ();
1829 end_sequence ();
1830 emit_insn_before (seq, first_insn);
1831 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1836 /* If the result is equivalent to TARGET, return TARGET to simplify
1837 checks in store_expr. They can be equivalent but not equal in the
1838 case of a function that returns BLKmode. */
1839 if (temp != target && rtx_equal_p (temp, target))
1840 return target;
1841 return temp;
1844 /* If inlining failed, mark FNDECL as needing to be compiled
1845 separately after all. If function was declared inline,
1846 give a warning. */
1847 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1848 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1850 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1851 warning ("called from here");
1853 (*lang_hooks.mark_addressable) (fndecl);
1854 return (rtx) (size_t) - 1;
1857 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1858 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1859 bytes, then we would need to push some additional bytes to pad the
1860 arguments. So, we compute an adjustment to the stack pointer for an
1861 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1862 bytes. Then, when the arguments are pushed the stack will be perfectly
1863 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1864 be popped after the call. Returns the adjustment. */
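/* Worked example (illustrative): with PREFERRED_UNIT_STACK_BOUNDARY
   == 16, stack_pointer_delta == 12, UNADJUSTED_ARGS_SIZE == 8 and
   pending_stack_adjust == 20, the code below computes
   unadjusted_alignment = (12 + 8) % 16 = 4, then 4 - (20 % 16) = 0,
   so the entire pending 20 bytes can be popped: the function returns
   20 and sets ARGS_SIZE->CONSTANT to 20 - 20 + 8 = 8.  Pushing the 8
   bytes of arguments afterwards leaves the stack 16-byte aligned.  */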
1866 static int
1867 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1868 args_size,
1869 preferred_unit_stack_boundary)
1870 int unadjusted_args_size;
1871 struct args_size *args_size;
1872 int preferred_unit_stack_boundary;
1874 /* The number of bytes to pop so that the stack will be
1875 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1876 HOST_WIDE_INT adjustment;
1877 /* The alignment of the stack after the arguments are pushed, if we
1878 just pushed the arguments without adjusting the stack here. */
1879 HOST_WIDE_INT unadjusted_alignment;
1881 unadjusted_alignment
1882 = ((stack_pointer_delta + unadjusted_args_size)
1883 % preferred_unit_stack_boundary);
1885 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1886 as possible -- leaving just enough left to cancel out the
1887 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1888 PENDING_STACK_ADJUST is non-negative, and congruent to
1889 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1891 /* Begin by trying to pop all the bytes. */
1892 unadjusted_alignment
1893 = (unadjusted_alignment
1894 - (pending_stack_adjust % preferred_unit_stack_boundary));
1895 adjustment = pending_stack_adjust;
1896 /* Push enough additional bytes that the stack will be aligned
1897 after the arguments are pushed. */
1898 if (preferred_unit_stack_boundary > 1)
1900 if (unadjusted_alignment > 0)
1901 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1902 else
1903 adjustment += unadjusted_alignment;
1906 /* Now set ARGS_SIZE->CONSTANT so that we pop the right number of
1907 bytes after the call. The right number is the entire
1908 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1909 by the arguments in the first place. */
1910 args_size->constant
1911 = pending_stack_adjust - adjustment + unadjusted_args_size;
1913 return adjustment;
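#if 0
/* Stand-alone sketch (not part of GCC) of the arithmetic above, handy
   for experimenting with sample values.  Every name in this block is
   local to the sketch.  */
#include <stdio.h>

static long
sketch_combine (long sp_delta, long args_size, long pending, long boundary)
{
  long align = (sp_delta + args_size) % boundary;
  long adjustment = pending;

  align -= pending % boundary;
  if (boundary > 1)
    {
      if (align > 0)
        adjustment -= boundary - align;
      else
        adjustment += align;
    }
  return adjustment;
}

int
main (void)
{
  /* Boundary 16, delta 12, 8 bytes of args, 24 bytes pending:
     popping 20 of the 24 bytes leaves (12 - 20 + 8) % 16 == 0.  */
  printf ("%ld\n", sketch_combine (12, 8, 24, 16)); /* prints 20 */
  return 0;
}
#endif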
1916 /* Scan expression X to see whether it dereferences any argument slots
1917 we have already clobbered with tail call arguments (as noted in the
1918 stored_args_map bitmap).
1919 Return nonzero if X dereferences such an argument slot,
1920 zero otherwise. */
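/* Illustrative example (assuming args grow upward): for
   (mem:SI (plus (arg_pointer) (const_int 8))) the code below checks
   bits 8 through 11 of stored_args_map; if any of them is set, that
   part of the incoming argument area has already been overwritten
   and the function returns 1.  */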
1922 static int
1923 check_sibcall_argument_overlap_1 (x)
1924 rtx x;
1926 RTX_CODE code;
1927 int i, j;
1928 unsigned int k;
1929 const char *fmt;
1931 if (x == NULL_RTX)
1932 return 0;
1934 code = GET_CODE (x);
1936 if (code == MEM)
1938 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1939 i = 0;
1940 else if (GET_CODE (XEXP (x, 0)) == PLUS
1941 && XEXP (XEXP (x, 0), 0) ==
1942 current_function_internal_arg_pointer
1943 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1944 i = INTVAL (XEXP (XEXP (x, 0), 1));
1945 else
1946 return 0;
1948 #ifdef ARGS_GROW_DOWNWARD
1949 i = -i - GET_MODE_SIZE (GET_MODE (x));
1950 #endif
1952 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
1953 if (i + k < stored_args_map->n_bits
1954 && TEST_BIT (stored_args_map, i + k))
1955 return 1;
1957 return 0;
1960 /* Scan all subexpressions. */
1961 fmt = GET_RTX_FORMAT (code);
1962 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1964 if (*fmt == 'e')
1966 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1967 return 1;
1969 else if (*fmt == 'E')
1971 for (j = 0; j < XVECLEN (x, i); j++)
1972 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1973 return 1;
1976 return 0;
1979 /* Scan the sequence after INSN to see whether it dereferences any
1980 argument slots we have already clobbered with tail call arguments
1981 (as noted in the stored_args_map bitmap). If MARK_STORED_ARGS_MAP is
1982 nonzero, add the stack slots for ARG to the stored_args_map bitmap
1983 afterwards (when ARG is a register, MARK_STORED_ARGS_MAP should be 0).
1984 Return nonzero if the sequence after INSN dereferences such argument slots, zero otherwise. */
1986 static int
1987 check_sibcall_argument_overlap (insn, arg, mark_stored_args_map)
1988 rtx insn;
1989 struct arg_data *arg;
1990 int mark_stored_args_map;
1992 int low, high;
1994 if (insn == NULL_RTX)
1995 insn = get_insns ();
1996 else
1997 insn = NEXT_INSN (insn);
1999 for (; insn; insn = NEXT_INSN (insn))
2000 if (INSN_P (insn)
2001 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2002 break;
2004 if (mark_stored_args_map)
2006 #ifdef ARGS_GROW_DOWNWARD
2007 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
2008 #else
2009 low = arg->locate.slot_offset.constant;
2010 #endif
2012 for (high = low + arg->locate.size.constant; low < high; low++)
2013 SET_BIT (stored_args_map, low);
2015 return insn != NULL_RTX;
2018 static tree
2019 fix_unsafe_tree (t)
2020 tree t;
2022 switch (unsafe_for_reeval (t))
2024 case 0: /* Safe. */
2025 break;
2027 case 1: /* Mildly unsafe. */
2028 t = unsave_expr (t);
2029 break;
2031 case 2: /* Wildly unsafe. */
2033 tree var = build_decl (VAR_DECL, NULL_TREE,
2034 TREE_TYPE (t));
2035 SET_DECL_RTL (var,
2036 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
2037 t = var;
2039 break;
2041 default:
2042 abort ();
2044 return t;
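/* The VAR_DECL trick above works because expanding a VAR_DECL merely
   reads its DECL_RTL: however many times the rewritten tree is
   re-expanded, the original expression T is evaluated exactly once,
   at the point where fix_unsafe_tree was called.  */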
2047 /* Generate all the code for a function call
2048 and return an rtx for its value.
2049 Store the value in TARGET (specified as an rtx) if convenient.
2050 If the value is stored in TARGET then TARGET is returned.
2051 If IGNORE is nonzero, then we ignore the value of the function call. */
2053 rtx
2054 expand_call (exp, target, ignore)
2055 tree exp;
2056 rtx target;
2057 int ignore;
2059 /* Nonzero if we are currently expanding a call. */
2060 static int currently_expanding_call = 0;
2062 /* List of actual parameters. */
2063 tree actparms = TREE_OPERAND (exp, 1);
2064 /* RTX for the function to be called. */
2065 rtx funexp;
2066 /* Sequence of insns to perform a tail recursive "call". */
2067 rtx tail_recursion_insns = NULL_RTX;
2068 /* Sequence of insns to perform a normal "call". */
2069 rtx normal_call_insns = NULL_RTX;
2070 /* Sequence of insns to perform a tail (sibling) "call". */
2071 rtx tail_call_insns = NULL_RTX;
2072 /* Data type of the function. */
2073 tree funtype;
2074 tree type_arg_types;
2075 /* Declaration of the function being called,
2076 or 0 if the function is computed (not known by name). */
2077 tree fndecl = 0;
2078 rtx insn;
2079 int try_tail_call = 1;
2080 int try_tail_recursion = 1;
2081 int pass;
2083 /* Register in which non-BLKmode value will be returned,
2084 or 0 if no value or if value is BLKmode. */
2085 rtx valreg;
2086 /* Address where we should return a BLKmode value;
2087 0 if value not BLKmode. */
2088 rtx structure_value_addr = 0;
2089 /* Nonzero if that address is being passed by treating it as
2090 an extra, implicit first parameter. Otherwise,
2091 it is passed by being copied directly into struct_value_rtx. */
2092 int structure_value_addr_parm = 0;
2093 /* Size of aggregate value wanted, or zero if none wanted
2094 or if we are using the non-reentrant PCC calling convention
2095 or expecting the value in registers. */
2096 HOST_WIDE_INT struct_value_size = 0;
2097 /* Nonzero if called function returns an aggregate in memory PCC style,
2098 by returning the address of where to find it. */
2099 int pcc_struct_value = 0;
2101 /* Number of actual parameters in this call, including struct value addr. */
2102 int num_actuals;
2103 /* Number of named args. Args after this are anonymous ones
2104 and they must all go on the stack. */
2105 int n_named_args;
2107 /* Vector of information about each argument.
2108 Arguments are numbered in the order they will be pushed,
2109 not the order they are written. */
2110 struct arg_data *args;
2112 /* Total size in bytes of all the stack-parms scanned so far. */
2113 struct args_size args_size;
2114 struct args_size adjusted_args_size;
2115 /* Size of arguments before any adjustments (such as rounding). */
2116 int unadjusted_args_size;
2117 /* Data on reg parms scanned so far. */
2118 CUMULATIVE_ARGS args_so_far;
2119 /* Nonzero if a reg parm has been scanned. */
2120 int reg_parm_seen;
2121 /* Nonzero if this is an indirect function call. */
2123 /* Nonzero if we must avoid push-insns in the args for this call.
2124 If stack space is allocated for register parameters, but not by the
2125 caller, then it is preallocated in the fixed part of the stack frame.
2126 So the entire argument block must then be preallocated (i.e., we
2127 ignore PUSH_ROUNDING in that case). */
2129 int must_preallocate = !PUSH_ARGS;
2131 /* Size of the stack reserved for parameter registers. */
2132 int reg_parm_stack_space = 0;
2134 /* Address of space preallocated for stack parms
2135 (on machines that lack push insns), or 0 if space not preallocated. */
2136 rtx argblock = 0;
2138 /* Mask of ECF_ flags. */
2139 int flags = 0;
2140 /* Nonzero if this is a call to an inline function. */
2141 int is_integrable = 0;
2142 #ifdef REG_PARM_STACK_SPACE
2143 /* Define the boundary of the register parm stack space that needs to be
2144 saved, if any. */
2145 int low_to_save, high_to_save;
2146 rtx save_area = 0; /* Place that it is saved */
2147 #endif
2149 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2150 char *initial_stack_usage_map = stack_usage_map;
2152 int old_stack_allocated;
2154 /* State variables to track stack modifications. */
2155 rtx old_stack_level = 0;
2156 int old_stack_arg_under_construction = 0;
2157 int old_pending_adj = 0;
2158 int old_inhibit_defer_pop = inhibit_defer_pop;
2160 /* Some stack pointer alterations we make are performed via
2161 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2162 which we then also need to save/restore along the way. */
2163 int old_stack_pointer_delta = 0;
2165 rtx call_fusage;
2166 tree p = TREE_OPERAND (exp, 0);
2167 tree addr = TREE_OPERAND (exp, 0);
2168 int i;
2169 /* The alignment of the stack, in bits. */
2170 HOST_WIDE_INT preferred_stack_boundary;
2171 /* The alignment of the stack, in bytes. */
2172 HOST_WIDE_INT preferred_unit_stack_boundary;
2174 /* See if this is a "nothrow" function call. */
2175 if (TREE_NOTHROW (exp))
2176 flags |= ECF_NOTHROW;
2178 /* See if we can find a DECL-node for the actual function.
2179 As a result, decide whether this is a call to an integrable function. */
2181 fndecl = get_callee_fndecl (exp);
2182 if (fndecl)
2184 if (!flag_no_inline
2185 && fndecl != current_function_decl
2186 && DECL_INLINE (fndecl)
2187 && DECL_SAVED_INSNS (fndecl)
2188 && DECL_SAVED_INSNS (fndecl)->inlinable)
2189 is_integrable = 1;
2190 else if (! TREE_ADDRESSABLE (fndecl))
2192 /* In case this function later becomes inlinable,
2193 record that there was already a non-inline call to it.
2195 Use abstraction instead of setting TREE_ADDRESSABLE
2196 directly. */
2197 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2198 && optimize > 0)
2200 warning_with_decl (fndecl, "can't inline call to `%s'");
2201 warning ("called from here");
2203 (*lang_hooks.mark_addressable) (fndecl);
2206 flags |= flags_from_decl_or_type (fndecl);
2209 /* If we don't have a specific function to call, see if we have
2210 attributes set in the type. */
2211 else
2212 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2214 /* Warn if this value is an aggregate type,
2215 regardless of which calling convention we are using for it. */
2216 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2217 warning ("function call has aggregate value");
2219 /* If the result of a pure or const function call is ignored (or void),
2220 and none of its arguments are volatile, we can avoid expanding the
2221 call and just evaluate the arguments for side-effects. */
2222 if ((flags & (ECF_CONST | ECF_PURE))
2223 && (ignore || target == const0_rtx
2224 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
2226 bool volatilep = false;
2227 tree arg;
2229 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2230 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
2232 volatilep = true;
2233 break;
2236 if (! volatilep)
2238 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
2239 expand_expr (TREE_VALUE (arg), const0_rtx,
2240 VOIDmode, EXPAND_NORMAL);
2241 return const0_rtx;
2245 #ifdef REG_PARM_STACK_SPACE
2246 #ifdef MAYBE_REG_PARM_STACK_SPACE
2247 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2248 #else
2249 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2250 #endif
2251 #endif
2253 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2254 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2255 must_preallocate = 1;
2256 #endif
2258 /* Set up a place to return a structure. */
2260 /* Cater to broken compilers. */
2261 if (aggregate_value_p (exp))
2263 /* This call returns a big structure. */
2264 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2266 #ifdef PCC_STATIC_STRUCT_RETURN
2268 pcc_struct_value = 1;
2269 /* Easier than making that case work right. */
2270 if (is_integrable)
2272 /* In case this is a static function, note that it has been
2273 used. */
2274 if (! TREE_ADDRESSABLE (fndecl))
2275 (*lang_hooks.mark_addressable) (fndecl);
2276 is_integrable = 0;
2279 #else /* not PCC_STATIC_STRUCT_RETURN */
2281 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2283 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2285 /* The structure value address arg is already in actparms.
2286 Pull it out. It might be nice to just leave it there, but
2287 we need to set structure_value_addr. */
2288 tree return_arg = TREE_VALUE (actparms);
2289 actparms = TREE_CHAIN (actparms);
2290 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2291 VOIDmode, EXPAND_NORMAL);
2293 else if (target && GET_CODE (target) == MEM)
2294 structure_value_addr = XEXP (target, 0);
2295 else
2297 /* For variable-sized objects, we must be called with a target
2298 specified. If we were to allocate space on the stack here,
2299 we would have no way of knowing when to free it. */
2300 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2302 mark_temp_addr_taken (d);
2303 structure_value_addr = XEXP (d, 0);
2304 target = 0;
2307 #endif /* not PCC_STATIC_STRUCT_RETURN */
2310 /* If called function is inline, try to integrate it. */
2312 if (is_integrable)
2314 rtx temp = try_to_integrate (fndecl, actparms, target,
2315 ignore, TREE_TYPE (exp),
2316 structure_value_addr);
2317 if (temp != (rtx) (size_t) - 1)
2318 return temp;
2321 /* Figure out the amount to which the stack should be aligned. */
2322 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2323 if (fndecl)
2325 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2326 if (i && i->preferred_incoming_stack_boundary)
2327 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2330 /* Operand 0 is a pointer-to-function; get the type of the function. */
2331 funtype = TREE_TYPE (addr);
2332 if (! POINTER_TYPE_P (funtype))
2333 abort ();
2334 funtype = TREE_TYPE (funtype);
2336 /* Munge the tree to split complex arguments into their imaginary
2337 and real parts. */
2338 if (SPLIT_COMPLEX_ARGS)
2340 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2341 actparms = split_complex_values (actparms);
2343 else
2344 type_arg_types = TYPE_ARG_TYPES (funtype);
2346 /* See if this is a call to a function that can return more than once
2347 or a call to longjmp or malloc. */
2348 flags |= special_function_p (fndecl, flags);
2350 if (flags & ECF_MAY_BE_ALLOCA)
2351 current_function_calls_alloca = 1;
2353 /* If struct_value_rtx is 0, it means pass the address
2354 as if it were an extra parameter. */
2355 if (structure_value_addr && struct_value_rtx == 0)
2357 /* If structure_value_addr is a REG other than
2358 virtual_outgoing_args_rtx, we can always use it. If it
2359 is not a REG, we must always copy it into a register.
2360 If it is virtual_outgoing_args_rtx, we must copy it to another
2361 register in some cases. */
2362 rtx temp = (GET_CODE (structure_value_addr) != REG
2363 || (ACCUMULATE_OUTGOING_ARGS
2364 && stack_arg_under_construction
2365 && structure_value_addr == virtual_outgoing_args_rtx)
2366 ? copy_addr_to_reg (structure_value_addr)
2367 : structure_value_addr);
2369 actparms
2370 = tree_cons (error_mark_node,
2371 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2372 temp),
2373 actparms);
2374 structure_value_addr_parm = 1;
2377 /* Count the arguments and set NUM_ACTUALS. */
2378 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2379 num_actuals++;
2381 /* Compute number of named args.
2382 Normally, don't include the last named arg if anonymous args follow.
2383 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2384 (If no anonymous args follow, the result of list_length is actually
2385 one too large. This is harmless.)
2387 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2388 zero, this machine will be able to place unnamed args that were
2389 passed in registers into the stack. So treat all args as named.
2390 This allows the insns emitted for a specific argument list to be
2391 independent of the function declaration.
2393 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2394 reliable way to pass unnamed args in registers, so we must force
2395 them into memory. */
2397 if ((STRICT_ARGUMENT_NAMING
2398 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2399 && type_arg_types != 0)
2400 n_named_args
2401 = (list_length (type_arg_types)
2402 /* Don't include the last named arg. */
2403 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2404 /* Count the struct value address, if it is passed as a parm. */
2405 + structure_value_addr_parm);
2406 else
2407 /* If we know nothing, treat all args as named. */
2408 n_named_args = num_actuals;
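/* Worked example (illustrative): for `int f (int a, int b, ...)' the
   TYPE_ARG_TYPES list has no trailing void_type_node, so list_length
   is 2 and, with STRICT_ARGUMENT_NAMING zero, n_named_args is 1 --
   the last named arg B is treated as unnamed because anonymous args
   follow it.  For `int f (int a, int b)' the trailing void_type_node
   makes list_length 3, and subtracting one yields the correct count
   of 2.  */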
2410 /* Start updating where the next arg would go.
2412 On some machines (such as the PA) indirect calls have a different
2413 calling convention than normal calls. The last argument in
2414 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2415 or not. */
2416 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
2418 /* Make a vector to hold all the information about each arg. */
2419 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2420 memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
2422 /* Build up entries in the ARGS array, compute the size of the
2423 arguments into ARGS_SIZE, etc. */
2424 initialize_argument_information (num_actuals, args, &args_size,
2425 n_named_args, actparms, fndecl,
2426 &args_so_far, reg_parm_stack_space,
2427 &old_stack_level, &old_pending_adj,
2428 &must_preallocate, &flags);
2430 if (args_size.var)
2432 /* If this function requires a variable-sized argument list, don't
2433 try to make a cse'able block for this call. We may be able to
2434 do this eventually, but it is too complicated to keep track of
2435 what insns go in the cse'able block and which don't. */
2437 flags &= ~ECF_LIBCALL_BLOCK;
2438 must_preallocate = 1;
2441 /* Now make final decision about preallocating stack space. */
2442 must_preallocate = finalize_must_preallocate (must_preallocate,
2443 num_actuals, args,
2444 &args_size);
2446 /* If the structure value address will reference the stack pointer, we
2447 must stabilize it. We don't need to do this if we know that we are
2448 not going to adjust the stack pointer in processing this call. */
2450 if (structure_value_addr
2451 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2452 || reg_mentioned_p (virtual_outgoing_args_rtx,
2453 structure_value_addr))
2454 && (args_size.var
2455 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2456 structure_value_addr = copy_to_reg (structure_value_addr);
2458 /* Tail calls can make things harder to debug, and we've traditionally
2459 pushed these optimizations into -O2. Don't try if we're already
2460 expanding a call, as that means we're an argument. Don't try if
2461 there are cleanups, as we know there is code to follow the call.
2463 If rtx_equal_function_value_matters is false, that means we've
2464 finished with regular parsing, which means that some of the
2465 machinery we use to generate tail-calls is no longer in place.
2466 This is most often true of sjlj-exceptions, which we couldn't
2467 tail-call to anyway. */
2469 if (currently_expanding_call++ != 0
2470 || !flag_optimize_sibling_calls
2471 || !rtx_equal_function_value_matters
2472 || any_pending_cleanups (1)
2473 || args_size.var)
2474 try_tail_call = try_tail_recursion = 0;
2476 /* Tail recursion fails when we are not dealing with recursive calls. */
2477 if (!try_tail_recursion
2478 || TREE_CODE (addr) != ADDR_EXPR
2479 || TREE_OPERAND (addr, 0) != current_function_decl)
2480 try_tail_recursion = 0;
2482 /* Remaining reasons for tail call optimization to fail. */
2483 if (
2484 #ifdef HAVE_sibcall_epilogue
2485 !HAVE_sibcall_epilogue
2486 #else
2488 #endif
2489 || !try_tail_call
2490 /* Doing sibling call optimization needs some work, since
2491 structure_value_addr can be allocated on the stack.
2492 It does not seem worth the effort since few optimizable
2493 sibling calls will return a structure. */
2494 || structure_value_addr != NULL_RTX
2495 /* Check whether the target is able to optimize the call
2496 into a sibcall. */
2497 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2498 /* Functions that do not return exactly once may not be sibcall
2499 optimized. */
2500 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2501 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2502 /* If the called function is nested in the current one, it might access
2503 some of the caller's arguments, but could clobber them beforehand if
2504 the argument areas are shared. */
2505 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2506 /* If this function requires more stack slots than the current
2507 function, we cannot change it into a sibling call. */
2508 || args_size.constant > current_function_args_size
2509 /* If the callee pops its own arguments, then it must pop exactly
2510 the same number of arguments as the current function. */
2511 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2512 != RETURN_POPS_ARGS (current_function_decl,
2513 TREE_TYPE (current_function_decl),
2514 current_function_args_size))
2515 || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
2516 try_tail_call = 0;
2518 if (try_tail_call || try_tail_recursion)
2520 int end, inc;
2521 actparms = NULL_TREE;
2522 /* Ok, we're going to give the tail call the old college try.
2523 This means we're going to evaluate the function arguments
2524 up to three times. There are two degrees of badness we can
2525 encounter, those that can be unsaved and those that can't.
2526 (See unsafe_for_reeval commentary for details.)
2528 Generate a new argument list. Pass safe arguments through
2529 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2530 For hard badness, evaluate them now and put their resulting
2531 rtx in a temporary VAR_DECL.
2533 initialize_argument_information has ordered the array for the
2534 order to be pushed, and we must remember this when reconstructing
2535 the original argument order. */
2537 if (PUSH_ARGS_REVERSED)
2539 inc = 1;
2540 i = 0;
2541 end = num_actuals;
2543 else
2545 inc = -1;
2546 i = num_actuals - 1;
2547 end = -1;
2550 for (; i != end; i += inc)
2552 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2553 /* We need to build actparms for optimize_tail_recursion. We can
2554 safely trash away TREE_PURPOSE, since it is unused by this
2555 function. */
2556 if (try_tail_recursion)
2557 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2559 /* Do the same for the function address if it is an expression. */
2560 if (!fndecl)
2561 addr = fix_unsafe_tree (addr);
2562 /* Expanding one of those dangerous arguments could have added
2563 cleanups, but otherwise give it a whirl. */
2564 if (any_pending_cleanups (1))
2565 try_tail_call = try_tail_recursion = 0;
2568 /* Generate a tail recursion sequence when calling ourselves. */
2570 if (try_tail_recursion)
2572 /* We want to emit any pending stack adjustments before the tail
2573 recursion "call". That way we know any adjustment after the tail
2574 recursion call can be ignored if we indeed use the tail recursion
2575 call expansion. */
2576 int save_pending_stack_adjust = pending_stack_adjust;
2577 int save_stack_pointer_delta = stack_pointer_delta;
2579 /* Emit any queued insns now; otherwise they would end up in
2580 only one of the alternates. */
2581 emit_queue ();
2583 /* Use a new sequence to hold any RTL we generate. We do not even
2584 know if we will use this RTL yet. The final decision can not be
2585 made until after RTL generation for the entire function is
2586 complete. */
2587 start_sequence ();
2588 /* If expanding any of the arguments creates cleanups, we can't
2589 do a tailcall. So, we'll need to pop the pending cleanups
2590 list. If, however, all goes well, and there are no cleanups
2591 then the call to expand_start_target_temps will have no
2592 effect. */
2593 expand_start_target_temps ();
2594 if (optimize_tail_recursion (actparms, get_last_insn ()))
2596 if (any_pending_cleanups (1))
2597 try_tail_call = try_tail_recursion = 0;
2598 else
2599 tail_recursion_insns = get_insns ();
2601 expand_end_target_temps ();
2602 end_sequence ();
2604 /* Restore the original pending stack adjustment for the sibling and
2605 normal call cases below. */
2606 pending_stack_adjust = save_pending_stack_adjust;
2607 stack_pointer_delta = save_stack_pointer_delta;
2610 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2612 /* A fork duplicates the profile information, and an exec discards
2613 it. We can't rely on fork/exec to be paired. So write out the
2614 profile information we have gathered so far, and clear it. */
2615 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2616 is subject to race conditions, just as with multithreaded
2617 programs. */
2619 emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
2622 /* Ensure current function's preferred stack boundary is at least
2623 what we need. We don't have to increase alignment for recursive
2624 functions. */
2625 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2626 && fndecl != current_function_decl)
2627 cfun->preferred_stack_boundary = preferred_stack_boundary;
2628 if (fndecl == current_function_decl)
2629 cfun->recursive_call_emit = true;
2631 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2633 function_call_count++;
2635 /* We want to make two insn chains; one for a sibling call, the other
2636 for a normal call. We will select one of the two chains after
2637 initial RTL generation is complete. */
2638 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2640 int sibcall_failure = 0;
2641 /* We want to emit any pending stack adjustments before the tail
2642 recursion "call". That way we know any adjustment after the tail
2643 recursion call can be ignored if we indeed use the tail recursion
2644 call expansion. */
2645 int save_pending_stack_adjust = 0;
2646 int save_stack_pointer_delta = 0;
2647 rtx insns;
2648 rtx before_call, next_arg_reg;
2650 if (pass == 0)
2652 /* Emit any queued insns now; otherwise they would end up in
2653 only one of the alternates. */
2654 emit_queue ();
2656 /* State variables we need to save and restore between
2657 iterations. */
2658 save_pending_stack_adjust = pending_stack_adjust;
2659 save_stack_pointer_delta = stack_pointer_delta;
2661 if (pass)
2662 flags &= ~ECF_SIBCALL;
2663 else
2664 flags |= ECF_SIBCALL;
2666 /* Other state variables that we must reinitialize each time
2667 through the loop (that are not initialized by the loop itself). */
2668 argblock = 0;
2669 call_fusage = 0;
2671 /* Start a new sequence for the normal call case.
2673 From this point on, if the sibling call fails, we want to set
2674 sibcall_failure instead of continuing the loop. */
2675 start_sequence ();
2677 if (pass == 0)
2679 /* We know at this point that there are not currently any
2680 pending cleanups. If, however, in the process of evaluating
2681 the arguments we were to create some, we'll need to be
2682 able to get rid of them. */
2683 expand_start_target_temps ();
2686 /* Don't let pending stack adjusts add up to too much.
2687 Also, do all pending adjustments now if there is any chance
2688 this might be a call to alloca or if we are expanding a sibling
2689 call sequence or if we are calling a function that is to return
2690 with stack pointer depressed. */
2691 if (pending_stack_adjust >= 32
2692 || (pending_stack_adjust > 0
2693 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2694 || pass == 0)
2695 do_pending_stack_adjust ();
2697 /* When calling a const function, we must pop the stack args right away,
2698 so that the pop is deleted or moved with the call. */
2699 if (pass && (flags & ECF_LIBCALL_BLOCK))
2700 NO_DEFER_POP;
2702 #ifdef FINAL_REG_PARM_STACK_SPACE
2703 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2704 args_size.var);
2705 #endif
2706 /* Precompute any arguments as needed. */
2707 if (pass)
2708 precompute_arguments (flags, num_actuals, args);
2710 /* Now we are about to start emitting insns that can be deleted
2711 if a libcall is deleted. */
2712 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2713 start_sequence ();
2715 adjusted_args_size = args_size;
2716 /* Compute the actual size of the argument block required. The variable
2717 and constant sizes must be combined, the size may have to be rounded,
2718 and there may be a minimum required size. When generating a sibcall
2719 pattern, do not round up, since we'll be re-using whatever space our
2720 caller provided. */
2721 unadjusted_args_size
2722 = compute_argument_block_size (reg_parm_stack_space,
2723 &adjusted_args_size,
2724 (pass == 0 ? 0
2725 : preferred_stack_boundary));
2727 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2729 /* The argument block when performing a sibling call is the
2730 incoming argument block. */
2731 if (pass == 0)
2733 argblock = virtual_incoming_args_rtx;
2734 argblock
2735 #ifdef STACK_GROWS_DOWNWARD
2736 = plus_constant (argblock, current_function_pretend_args_size);
2737 #else
2738 = plus_constant (argblock, -current_function_pretend_args_size);
2739 #endif
2740 stored_args_map = sbitmap_alloc (args_size.constant);
2741 sbitmap_zero (stored_args_map);
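/* One bit per byte of incoming argument space: with, say, 16 bytes
   of stack arguments the bitmap has bits 0 through 15, and
   check_sibcall_argument_overlap sets a bit as soon as the
   corresponding byte is overwritten with an outgoing argument.  */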
2744 /* If we have no actual push instructions, or shouldn't use them,
2745 make space for all args right now. */
2746 else if (adjusted_args_size.var != 0)
2748 if (old_stack_level == 0)
2750 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2751 old_stack_pointer_delta = stack_pointer_delta;
2752 old_pending_adj = pending_stack_adjust;
2753 pending_stack_adjust = 0;
2754 /* stack_arg_under_construction says whether a stack arg is
2755 being constructed at the old stack level. Pushing the stack
2756 gets a clean outgoing argument block. */
2757 old_stack_arg_under_construction = stack_arg_under_construction;
2758 stack_arg_under_construction = 0;
2760 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2762 else
2764 /* Note that we must go through the motions of allocating an argument
2765 block even if the size is zero because we may be storing args
2766 in the area reserved for register arguments, which may be part of
2767 the stack frame. */
2769 int needed = adjusted_args_size.constant;
2771 /* Store the maximum argument space used. It will be pushed by
2772 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2773 checking). */
2775 if (needed > current_function_outgoing_args_size)
2776 current_function_outgoing_args_size = needed;
2778 if (must_preallocate)
2780 if (ACCUMULATE_OUTGOING_ARGS)
2782 /* Since the stack pointer will never be pushed, it is
2783 possible for the evaluation of a parm to clobber
2784 something we have already written to the stack.
2785 Since most function calls on RISC machines do not use
2786 the stack, this is uncommon, but must work correctly.
2788 Therefore, we save any area of the stack that was already
2789 written and that we are using. Here we set up to do this
2790 by making a new stack usage map from the old one. The
2791 actual save will be done by store_one_arg.
2793 Another approach might be to try to reorder the argument
2794 evaluations to avoid this conflicting stack usage. */
2796 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2797 /* Since we will be writing into the entire argument area,
2798 the map must be allocated for its entire size, not just
2799 the part that is the responsibility of the caller. */
2800 needed += reg_parm_stack_space;
2801 #endif
2803 #ifdef ARGS_GROW_DOWNWARD
2804 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2805 needed + 1);
2806 #else
2807 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2808 needed);
2809 #endif
2810 stack_usage_map
2811 = (char *) alloca (highest_outgoing_arg_in_use);
2813 if (initial_highest_arg_in_use)
2814 memcpy (stack_usage_map, initial_stack_usage_map,
2815 initial_highest_arg_in_use);
2817 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2818 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2819 (highest_outgoing_arg_in_use
2820 - initial_highest_arg_in_use));
2821 needed = 0;
2823 /* The address of the outgoing argument list must not be
2824 copied to a register here, because argblock would be left
2825 pointing to the wrong place after the call to
2826 allocate_dynamic_stack_space below. */
2828 argblock = virtual_outgoing_args_rtx;
2830 else
2832 if (inhibit_defer_pop == 0)
2834 /* Try to reuse some or all of the pending_stack_adjust
2835 to get this space. */
2836 needed
2837 = (combine_pending_stack_adjustment_and_call
2838 (unadjusted_args_size,
2839 &adjusted_args_size,
2840 preferred_unit_stack_boundary));
2842 /* combine_pending_stack_adjustment_and_call computes
2843 an adjustment before the arguments are allocated.
2844 Account for them and see whether or not the stack
2845 needs to go up or down. */
2846 needed = unadjusted_args_size - needed;
2848 if (needed < 0)
2850 /* We're releasing stack space. */
2851 /* ??? We can avoid any adjustment at all if we're
2852 already aligned. FIXME. */
2853 pending_stack_adjust = -needed;
2854 do_pending_stack_adjust ();
2855 needed = 0;
2857 else
2858 /* We need to allocate space. We'll do that in
2859 push_block below. */
2860 pending_stack_adjust = 0;
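/* Continuing the example given before
   combine_pending_stack_adjustment_and_call: if it returns 20 while
   UNADJUSTED_ARGS_SIZE is 8, NEEDED becomes 8 - 20 = -12, so the 12
   surplus bytes are released immediately via the pending adjustment
   and no new space has to be pushed for the arguments.  */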
2863 /* Special case this because overhead of `push_block' in
2864 this case is non-trivial. */
2865 if (needed == 0)
2866 argblock = virtual_outgoing_args_rtx;
2867 else
2869 argblock = push_block (GEN_INT (needed), 0, 0);
2870 #ifdef ARGS_GROW_DOWNWARD
2871 argblock = plus_constant (argblock, needed);
2872 #endif
2875 /* We only really need to call `copy_to_reg' in the case
2876 where push insns are going to be used to pass ARGBLOCK
2877 to a function call in ARGS. In that case, the stack
2878 pointer changes value from the allocation point to the
2879 call point, and hence the value of
2880 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2881 as well always do it. */
2882 argblock = copy_to_reg (argblock);
2887 if (ACCUMULATE_OUTGOING_ARGS)
2889 /* The save/restore code in store_one_arg handles all
2890 cases except one: a constructor call (including a C
2891 function returning a BLKmode struct) to initialize
2892 an argument. */
2893 if (stack_arg_under_construction)
2895 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2896 rtx push_size = GEN_INT (reg_parm_stack_space
2897 + adjusted_args_size.constant);
2898 #else
2899 rtx push_size = GEN_INT (adjusted_args_size.constant);
2900 #endif
2901 if (old_stack_level == 0)
2903 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2904 NULL_RTX);
2905 old_stack_pointer_delta = stack_pointer_delta;
2906 old_pending_adj = pending_stack_adjust;
2907 pending_stack_adjust = 0;
2908 /* stack_arg_under_construction says whether a stack
2909 arg is being constructed at the old stack level.
2910 Pushing the stack gets a clean outgoing argument
2911 block. */
2912 old_stack_arg_under_construction
2913 = stack_arg_under_construction;
2914 stack_arg_under_construction = 0;
2915 /* Make a new map for the new argument list. */
2916 stack_usage_map = (char *)
2917 alloca (highest_outgoing_arg_in_use);
2918 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2919 highest_outgoing_arg_in_use = 0;
2921 allocate_dynamic_stack_space (push_size, NULL_RTX,
2922 BITS_PER_UNIT);
2925 /* If argument evaluation might modify the stack pointer,
2926 copy the address of the argument list to a register. */
2927 for (i = 0; i < num_actuals; i++)
2928 if (args[i].pass_on_stack)
2930 argblock = copy_addr_to_reg (argblock);
2931 break;
2935 compute_argument_addresses (args, argblock, num_actuals);
2937 /* If we push args individually in reverse order, perform stack alignment
2938 before the first push (the last arg). */
2939 if (PUSH_ARGS_REVERSED && argblock == 0
2940 && adjusted_args_size.constant != unadjusted_args_size)
2942 /* When the stack adjustment is pending, we get better code
2943 by combining the adjustments. */
2944 if (pending_stack_adjust
2945 && ! (flags & ECF_LIBCALL_BLOCK)
2946 && ! inhibit_defer_pop)
2948 pending_stack_adjust
2949 = (combine_pending_stack_adjustment_and_call
2950 (unadjusted_args_size,
2951 &adjusted_args_size,
2952 preferred_unit_stack_boundary));
2953 do_pending_stack_adjust ();
2955 else if (argblock == 0)
2956 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2957 - unadjusted_args_size));
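/* Worked example (illustrative): if rounding the argument block to
   the preferred boundary grew ADJUSTED_ARGS_SIZE.CONSTANT to 24 while
   UNADJUSTED_ARGS_SIZE is 20, the anti_adjust_stack call above pushes
   the 4 bytes of padding first, so that the arguments pushed after it
   end exactly on an alignment boundary.  */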
2959 /* Now that the stack is properly aligned, pops can't safely
2960 be deferred during the evaluation of the arguments. */
2961 NO_DEFER_POP;
2963 funexp = rtx_for_function_call (fndecl, addr);
2965 /* Figure out the register where the value, if any, will come back. */
2966 valreg = 0;
2967 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2968 && ! structure_value_addr)
2970 if (pcc_struct_value)
2971 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2972 fndecl, (pass == 0));
2973 else
2974 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2977 /* Precompute all register parameters. It isn't safe to compute anything
2978 once we have started filling any specific hard regs. */
2979 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2981 #ifdef REG_PARM_STACK_SPACE
2982 /* Save the fixed argument area if it's part of the caller's frame and
2983 is clobbered by argument setup for this call. */
2984 if (ACCUMULATE_OUTGOING_ARGS && pass)
2985 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2986 &low_to_save, &high_to_save);
2987 #endif
2989 /* Now store (and compute if necessary) all non-register parms.
2990 These come before register parms, since they can require block-moves,
2991 which could clobber the registers used for register parms.
2992 Parms which have partial registers are not stored here,
2993 but we do preallocate space here if they want that. */
2995 for (i = 0; i < num_actuals; i++)
2996 if (args[i].reg == 0 || args[i].pass_on_stack)
2998 rtx before_arg = get_last_insn ();
3000 if (store_one_arg (&args[i], argblock, flags,
3001 adjusted_args_size.var != 0,
3002 reg_parm_stack_space)
3003 || (pass == 0
3004 && check_sibcall_argument_overlap (before_arg,
3005 &args[i], 1)))
3006 sibcall_failure = 1;
3009 /* If we have a parm that is passed in registers but not in memory
3010 and whose alignment does not permit a direct copy into registers,
3011 make a group of pseudos that correspond to each register that we
3012 will later fill. */
3013 if (STRICT_ALIGNMENT)
3014 store_unaligned_arguments_into_pseudos (args, num_actuals);
3016 /* Now store any partially-in-registers parm.
3017 This is the last place a block-move can happen. */
3018 if (reg_parm_seen)
3019 for (i = 0; i < num_actuals; i++)
3020 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3022 rtx before_arg = get_last_insn ();
3024 if (store_one_arg (&args[i], argblock, flags,
3025 adjusted_args_size.var != 0,
3026 reg_parm_stack_space)
3027 || (pass == 0
3028 && check_sibcall_argument_overlap (before_arg,
3029 &args[i], 1)))
3030 sibcall_failure = 1;
3033 /* If we pushed args in forward order, perform stack alignment
3034 after pushing the last arg. */
3035 if (!PUSH_ARGS_REVERSED && argblock == 0)
3036 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3037 - unadjusted_args_size));
3039 /* If register arguments require space on the stack and stack space
3040 was not preallocated, allocate stack space here for arguments
3041 passed in registers. */
3042 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3043 if (!ACCUMULATE_OUTGOING_ARGS
3044 && must_preallocate == 0 && reg_parm_stack_space > 0)
3045 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3046 #endif
3048 /* Pass the function the address in which to return a
3049 structure value. */
3050 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3052 #ifdef POINTERS_EXTEND_UNSIGNED
3053 if (GET_MODE (structure_value_addr) != Pmode)
3054 structure_value_addr = convert_memory_address
3055 (Pmode, structure_value_addr);
3056 #endif
3057 emit_move_insn (struct_value_rtx,
3058 force_reg (Pmode,
3059 force_operand (structure_value_addr,
3060 NULL_RTX)));
3062 if (GET_CODE (struct_value_rtx) == REG)
3063 use_reg (&call_fusage, struct_value_rtx);
3066 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3067 reg_parm_seen, pass == 0);
3069 load_register_parameters (args, num_actuals, &call_fusage, flags,
3070 pass == 0, &sibcall_failure);
3072 /* Perform postincrements before actually calling the function. */
3073 emit_queue ();
3075 /* Save a pointer to the last insn before the call, so that we can
3076 later safely search backwards to find the CALL_INSN. */
3077 before_call = get_last_insn ();
3079 /* Set up next argument register. For sibling calls on machines
3080 with register windows this should be the incoming register. */
3081 #ifdef FUNCTION_INCOMING_ARG
3082 if (pass == 0)
3083 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3084 void_type_node, 1);
3085 else
3086 #endif
3087 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3088 void_type_node, 1);
3090 /* All arguments and registers used for the call must be set up by
3091 now! */
3093 /* Stack must be properly aligned now. */
3094 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3095 abort ();
3097 /* Generate the actual call instruction. */
3098 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3099 adjusted_args_size.constant, struct_value_size,
3100 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3101 flags, & args_so_far);
3103 /* If call is cse'able, make appropriate pair of reg-notes around it.
3104 Test valreg so we don't crash; may safely ignore `const'
3105 if return type is void. Disable for PARALLEL return values, because
3106 we have no way to move such values into a pseudo register. */
3107 if (pass && (flags & ECF_LIBCALL_BLOCK))
3109 rtx insns;
3111 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3113 insns = get_insns ();
3114 end_sequence ();
3115 emit_insn (insns);
3117 else
3119 rtx note = 0;
3120 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3122 /* Mark the return value as a pointer if needed. */
3123 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3124 mark_reg_pointer (temp,
3125 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3127 /* Construct an "equal form" for the value which mentions all the
3128 arguments in order as well as the function name. */
3129 for (i = 0; i < num_actuals; i++)
3130 note = gen_rtx_EXPR_LIST (VOIDmode,
3131 args[i].initial_value, note);
3132 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3134 insns = get_insns ();
3135 end_sequence ();
3137 if (flags & ECF_PURE)
3138 note = gen_rtx_EXPR_LIST (VOIDmode,
3139 gen_rtx_USE (VOIDmode,
3140 gen_rtx_MEM (BLKmode,
3141 gen_rtx_SCRATCH (VOIDmode))),
3142 note);
3144 emit_libcall_block (insns, temp, valreg, note);
3146 valreg = temp;
3149 else if (pass && (flags & ECF_MALLOC))
3151 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3152 rtx last, insns;
3154 /* The return value from a malloc-like function is a pointer. */
3155 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3156 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3158 emit_move_insn (temp, valreg);
3160 /* The return value from a malloc-like function can not alias
3161 anything else. */
3162 last = get_last_insn ();
3163 REG_NOTES (last) =
3164 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3166 /* Write out the sequence. */
3167 insns = get_insns ();
3168 end_sequence ();
3169 emit_insn (insns);
3170 valreg = temp;
3173 /* For calls to `setjmp', etc., inform flow.c it should complain
3174 if nonvolatile values are live. For functions that cannot return,
3175 inform flow that control does not fall through. */
3177 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3179 /* The barrier must be emitted
3180 immediately after the CALL_INSN. Some ports emit more
3181 than just a CALL_INSN above, so we must search for it here. */
3183 rtx last = get_last_insn ();
3184 while (GET_CODE (last) != CALL_INSN)
3186 last = PREV_INSN (last);
3187 /* There was no CALL_INSN? */
3188 if (last == before_call)
3189 abort ();
3192 emit_barrier_after (last);
3195 if (flags & ECF_LONGJMP)
3196 current_function_calls_longjmp = 1;
3198 /* If value type not void, return an rtx for the value. */
3200 /* If there are cleanups to be called, don't use a hard reg as target.
3201 We need to double check this and see if it matters anymore. */
3202 if (any_pending_cleanups (1))
3204 if (target && REG_P (target)
3205 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3206 target = 0;
3207 sibcall_failure = 1;
3210 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3211 || ignore)
3212 target = const0_rtx;
3213 else if (structure_value_addr)
3215 if (target == 0 || GET_CODE (target) != MEM)
3217 target
3218 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3219 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3220 structure_value_addr));
3221 set_mem_attributes (target, exp, 1);
3224 else if (pcc_struct_value)
3226 /* This is the special C++ case where we need to
3227 know what the true target was. We take care to
3228 never use this value more than once in one expression. */
3229 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3230 copy_to_reg (valreg));
3231 set_mem_attributes (target, exp, 1);
3233 /* Handle calls that return values in multiple non-contiguous locations.
3234 The Irix 6 ABI has examples of this. */
3235 else if (GET_CODE (valreg) == PARALLEL)
3237 if (target == 0)
3239 /* This will only be assigned once, so it can be readonly. */
3240 tree nt = build_qualified_type (TREE_TYPE (exp),
3241 (TYPE_QUALS (TREE_TYPE (exp))
3242 | TYPE_QUAL_CONST));
3244 target = assign_temp (nt, 0, 1, 1);
3245 preserve_temp_slots (target);
3248 if (! rtx_equal_p (target, valreg))
3249 emit_group_store (target, valreg,
3250 int_size_in_bytes (TREE_TYPE (exp)));
3252 /* We can not support sibling calls for this case. */
3253 sibcall_failure = 1;
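/* (For instance, under the Irix 6 N32/N64 ABI a small structure
   containing two doubles comes back in a PARALLEL of two
   floating-point registers, which emit_group_store spills into
   TARGET's memory.)  */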
3255 else if (target
3256 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3257 && GET_MODE (target) == GET_MODE (valreg))
3259 /* TARGET and VALREG cannot be equal at this point because the
3260 latter would not have REG_FUNCTION_VALUE_P true, while the
3261 former would if it were referring to the same register.
3263 If they refer to the same register, this move will be a no-op,
3264 except when function inlining is being done. */
3265 emit_move_insn (target, valreg);
3267 /* If we are setting a MEM, this code must be executed. Since it is
3268 emitted after the call insn, sibcall optimization cannot be
3269 performed in that case. */
3270 if (GET_CODE (target) == MEM)
3271 sibcall_failure = 1;
3273 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3275 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3277 /* We can not support sibling calls for this case. */
3278 sibcall_failure = 1;
3280 else
3281 target = copy_to_reg (valreg);
3283 #ifdef PROMOTE_FUNCTION_RETURN
3284 /* If we promoted this return value, make the proper SUBREG. TARGET
3285 might be const0_rtx here, so be careful. */
3286 if (GET_CODE (target) == REG
3287 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3288 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3290 tree type = TREE_TYPE (exp);
3291 int unsignedp = TREE_UNSIGNED (type);
3292 int offset = 0;
3294 /* If we don't promote as expected, something is wrong. */
3295 if (GET_MODE (target)
3296 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3297 abort ();
3299 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3300 && GET_MODE_SIZE (GET_MODE (target))
3301 > GET_MODE_SIZE (TYPE_MODE (type)))
3303 offset = GET_MODE_SIZE (GET_MODE (target))
3304 - GET_MODE_SIZE (TYPE_MODE (type));
3305 if (! BYTES_BIG_ENDIAN)
3306 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3307 else if (! WORDS_BIG_ENDIAN)
3308 offset %= UNITS_PER_WORD;
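/* E.g., on a 64-bit big-endian target where an SImode value is
   returned promoted in a DImode register, OFFSET ends up as
   8 - 4 = 4, so the SUBREG built below selects the low-order
   (least significant) half of the register.  */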
3310 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3311 SUBREG_PROMOTED_VAR_P (target) = 1;
3312 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3314 #endif
3316 /* If size of args is variable or this was a constructor call for a stack
3317 argument, restore saved stack-pointer value. */
3319 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3321 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3322 stack_pointer_delta = old_stack_pointer_delta;
3323 pending_stack_adjust = old_pending_adj;
3324 stack_arg_under_construction = old_stack_arg_under_construction;
3325 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3326 stack_usage_map = initial_stack_usage_map;
3327 sibcall_failure = 1;
3329 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3331 #ifdef REG_PARM_STACK_SPACE
3332 if (save_area)
3333 restore_fixed_argument_area (save_area, argblock,
3334 high_to_save, low_to_save);
3335 #endif
3337 /* If we saved any argument areas, restore them. */
3338 for (i = 0; i < num_actuals; i++)
3339 if (args[i].save_area)
3341 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3342 rtx stack_area
3343 = gen_rtx_MEM (save_mode,
3344 memory_address (save_mode,
3345 XEXP (args[i].stack_slot, 0)));
3347 if (save_mode != BLKmode)
3348 emit_move_insn (stack_area, args[i].save_area);
3349 else
3350 emit_block_move (stack_area, args[i].save_area,
3351 GEN_INT (args[i].locate.size.constant),
3352 BLOCK_OP_CALL_PARM);
3355 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3356 stack_usage_map = initial_stack_usage_map;
3359 /* If this was alloca, record the new stack level for nonlocal gotos.
3360 Check for the handler slots since we might not have a save area
3361 for non-local gotos. */
3363 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3364 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3366 /* Free up storage we no longer need. */
3367 for (i = 0; i < num_actuals; ++i)
3368 if (args[i].aligned_regs)
3369 free (args[i].aligned_regs);
3371 if (pass == 0)
3373 /* Undo the fake expand_start_target_temps we did earlier. If
3374 there had been any cleanups created, we've already set
3375 sibcall_failure. */
3376 expand_end_target_temps ();
3379 /* If this function is returning into a memory location marked as
3380 readonly, it means it is initializing that location. We normally treat
3381 functions as not clobbering such locations, so we need to specify that
3382 this one does. We do this by adding the appropriate CLOBBER to the
3383 CALL_INSN function usage list. This cannot be done by emitting a
3384 standalone CLOBBER after the call because the latter would be ignored
3385 by at least the delay slot scheduling pass. We do this now instead of
3386 adding to call_fusage before the call to emit_call_1 because TARGET
3387 may be modified in the meantime. */
3388 if (structure_value_addr != 0 && target != 0
3389 && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
3390 add_function_usage_to
3391 (last_call_insn (),
3392 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
3393 NULL_RTX));
3395 insns = get_insns ();
3396 end_sequence ();
3398 if (pass == 0)
3400 tail_call_insns = insns;
3402 /* Restore the pending stack adjustment now that we have
3403 finished generating the sibling call sequence. */
3405 pending_stack_adjust = save_pending_stack_adjust;
3406 stack_pointer_delta = save_stack_pointer_delta;
3408 /* Prepare arg structure for next iteration. */
3409 for (i = 0; i < num_actuals; i++)
3411 args[i].value = 0;
3412 args[i].aligned_regs = 0;
3413 args[i].stack = 0;
3416 sbitmap_free (stored_args_map);
3418 else
3420 normal_call_insns = insns;
3422 /* Verify that we've deallocated all the stack we used. */
3423 if (old_stack_allocated !=
3424 stack_pointer_delta - pending_stack_adjust)
3425 abort ();
3428 /* If something prevents making this a sibling call,
3429 zero out the sequence. */
3430 if (sibcall_failure)
3431 tail_call_insns = NULL_RTX;
3434 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3435 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3436 can happen if the arguments to this function call an inline
3437 function whose expansion contains another CALL_PLACEHOLDER.
3439 If there are any C_Ps in any of these sequences, replace them
3440 with their normal call. */
3442 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3443 if (GET_CODE (insn) == CALL_INSN
3444 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3445 replace_call_placeholder (insn, sibcall_use_normal);
3447 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3448 if (GET_CODE (insn) == CALL_INSN
3449 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3450 replace_call_placeholder (insn, sibcall_use_normal);
3452 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3453 if (GET_CODE (insn) == CALL_INSN
3454 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3455 replace_call_placeholder (insn, sibcall_use_normal);
3457 /* If this was a potential tail recursion site, then emit a
3458 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3459 One of them will be selected later. */
3460 if (tail_recursion_insns || tail_call_insns)
3462 /* The tail recursion label must be kept around. We could expose
3463 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3464 and makes determining true tail recursion sites difficult.
3466 So we set LABEL_PRESERVE_P here, then clear it when we select
3467 one of the call sequences after rtl generation is complete. */
3468 if (tail_recursion_insns)
3469 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3470 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3471 tail_call_insns,
3472 tail_recursion_insns,
3473 tail_recursion_label));
3475 else
3476 emit_insn (normal_call_insns);
3478 currently_expanding_call--;
3480 /* If this function returns with the stack pointer depressed, ensure
3481 this block saves and restores the stack pointer, show it was
3482 changed, and adjust for any outgoing arg space. */
3483 if (flags & ECF_SP_DEPRESSED)
3485 clear_pending_stack_adjust ();
3486 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3487 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3488 save_stack_pointer ();
3491 return target;
3494 /* Traverse an argument list in VALUES and expand all complex
3495 arguments into their components. */
3496 tree
3497 split_complex_values (tree values)
3499 tree p;
3501 values = copy_list (values);
3503 for (p = values; p; p = TREE_CHAIN (p))
3505 tree complex_value = TREE_VALUE (p);
3506 tree complex_type;
3508 complex_type = TREE_TYPE (complex_value);
3509 if (!complex_type)
3510 continue;
3512 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3514 tree subtype;
3515 tree real, imag, next;
3517 subtype = TREE_TYPE (complex_type);
3518 complex_value = save_expr (complex_value);
3519 real = build1 (REALPART_EXPR, subtype, complex_value);
3520 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3522 TREE_VALUE (p) = real;
3523 next = TREE_CHAIN (p);
3524 imag = build_tree_list (NULL_TREE, imag);
3525 TREE_CHAIN (p) = imag;
3526 TREE_CHAIN (imag) = next;
3528 /* Skip the newly created node. */
3529 p = TREE_CHAIN (p);
3533 return values;
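/* Illustrative example: for a call foo (1.0, z) where z has type
   complex double, the (copied) argument list (1.0, z) becomes
   (1.0, REALPART_EXPR <z'>, IMAGPART_EXPR <z'>), where z' is z wrapped
   in a SAVE_EXPR so the complex value is only evaluated once; the loop
   then advances past the freshly spliced IMAGPART node so it is never
   expanded again.  */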
3536 /* Traverse a list of TYPES and expand all complex types into their
3537 components. */
3538 tree
3539 split_complex_types (tree types)
3541 tree p;
3543 types = copy_list (types);
3545 for (p = types; p; p = TREE_CHAIN (p))
3547 tree complex_type = TREE_VALUE (p);
3549 if (TREE_CODE (complex_type) == COMPLEX_TYPE)
3551 tree next, imag;
3553 /* Rewrite complex type with component type. */
3554 TREE_VALUE (p) = TREE_TYPE (complex_type);
3555 next = TREE_CHAIN (p);
3557 /* Add another component type for the imaginary part. */
3558 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3559 TREE_CHAIN (p) = imag;
3560 TREE_CHAIN (imag) = next;
3562 /* Skip the newly created node. */
3563 p = TREE_CHAIN (p);
3567 return types;
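/* Illustrative example: a type list (complex double, int) becomes
   (double, double, int); the second `double' is the node spliced in for
   the imaginary part, and the loop steps over it so it is not split
   again.  */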
3570 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3571 The RETVAL parameter specifies whether the return value needs to be
3572 saved; the other parameters are documented in emit_library_call below. */
3574 static rtx
3575 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3576 int retval;
3577 rtx orgfun;
3578 rtx value;
3579 enum libcall_type fn_type;
3580 enum machine_mode outmode;
3581 int nargs;
3582 va_list p;
3584 /* Total size in bytes of all the stack-parms scanned so far. */
3585 struct args_size args_size;
3586 /* Size of arguments before any adjustments (such as rounding). */
3587 struct args_size original_args_size;
3588 int argnum;
3589 rtx fun;
3590 int inc;
3591 int count;
3592 rtx argblock = 0;
3593 CUMULATIVE_ARGS args_so_far;
3594 struct arg
3596 rtx value;
3597 enum machine_mode mode;
3598 rtx reg;
3599 int partial;
3600 struct locate_and_pad_arg_data locate;
3601 rtx save_area;
3603 struct arg *argvec;
3604 int old_inhibit_defer_pop = inhibit_defer_pop;
3605 rtx call_fusage = 0;
3606 rtx mem_value = 0;
3607 rtx valreg;
3608 int pcc_struct_value = 0;
3609 int struct_value_size = 0;
3610 int flags;
3611 int reg_parm_stack_space = 0;
3612 int needed;
3613 rtx before_call;
3614 tree tfom; /* type_for_mode (outmode, 0) */
3616 #ifdef REG_PARM_STACK_SPACE
3617 /* Define the boundary of the register parm stack space that needs to be
3618 saved, if any. */
3619 int low_to_save, high_to_save;
3620 rtx save_area = 0; /* Place that it is saved. */
3621 #endif
3623 /* Size of the stack reserved for parameter registers. */
3624 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3625 char *initial_stack_usage_map = stack_usage_map;
3627 #ifdef REG_PARM_STACK_SPACE
3628 #ifdef MAYBE_REG_PARM_STACK_SPACE
3629 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3630 #else
3631 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3632 #endif
3633 #endif
3635 /* By default, library functions can not throw. */
3636 flags = ECF_NOTHROW;
3638 switch (fn_type)
3640 case LCT_NORMAL:
3641 break;
3642 case LCT_CONST:
3643 flags |= ECF_CONST;
3644 break;
3645 case LCT_PURE:
3646 flags |= ECF_PURE;
3647 break;
3648 case LCT_CONST_MAKE_BLOCK:
3649 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3650 break;
3651 case LCT_PURE_MAKE_BLOCK:
3652 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3653 break;
3654 case LCT_NORETURN:
3655 flags |= ECF_NORETURN;
3656 break;
3657 case LCT_THROW:
3658 flags = ECF_NORETURN;
3659 break;
3660 case LCT_ALWAYS_RETURN:
3661 flags = ECF_ALWAYS_RETURN;
3662 break;
3663 case LCT_RETURNS_TWICE:
3664 flags = ECF_RETURNS_TWICE;
3665 break;
3667 fun = orgfun;
3669 /* Ensure current function's preferred stack boundary is at least
3670 what we need. */
3671 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3672 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3674 /* If this kind of value comes back in memory,
3675 decide where in memory it should come back. */
3676 if (outmode != VOIDmode)
3678 tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3679 if (aggregate_value_p (tfom))
3681 #ifdef PCC_STATIC_STRUCT_RETURN
3682 rtx pointer_reg
3683 = hard_function_value (build_pointer_type (tfom), 0, 0);
3684 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3685 pcc_struct_value = 1;
3686 if (value == 0)
3687 value = gen_reg_rtx (outmode);
3688 #else /* not PCC_STATIC_STRUCT_RETURN */
3689 struct_value_size = GET_MODE_SIZE (outmode);
3690 if (value != 0 && GET_CODE (value) == MEM)
3691 mem_value = value;
3692 else
3693 mem_value = assign_temp (tfom, 0, 1, 1);
3694 #endif
3695 /* This call returns a big structure. */
3696 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3699 else
3700 tfom = void_type_node;
3702 /* ??? Unfinished: must pass the memory address as an argument. */
3704 /* Copy all the libcall-arguments out of the varargs data
3705 and into a vector ARGVEC.
3707 Compute how to pass each argument. We only support a very small subset
3708 of the full argument passing conventions to limit complexity here since
3709 library functions shouldn't have many args. */
3711 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3712 memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
3714 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3715 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3716 #else
3717 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3718 #endif
3720 args_size.constant = 0;
3721 args_size.var = 0;
3723 count = 0;
3725 /* Now we are about to start emitting insns that can be deleted
3726 if a libcall is deleted. */
3727 if (flags & ECF_LIBCALL_BLOCK)
3728 start_sequence ();
3730 push_temp_slots ();
3732 /* If there's a structure value address to be passed,
3733 either pass it in the special place, or pass it as an extra argument. */
3734 if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3736 rtx addr = XEXP (mem_value, 0);
3737 nargs++;
3739 /* Make sure it is a reasonable operand for a move or push insn. */
3740 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3741 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3742 addr = force_operand (addr, NULL_RTX);
3744 argvec[count].value = addr;
3745 argvec[count].mode = Pmode;
3746 argvec[count].partial = 0;
3748 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3749 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3750 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3751 abort ();
3752 #endif
3754 locate_and_pad_parm (Pmode, NULL_TREE,
3755 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3757 #else
3758 argvec[count].reg != 0,
3759 #endif
3760 0, NULL_TREE, &args_size, &argvec[count].locate);
3762 if (argvec[count].reg == 0 || argvec[count].partial != 0
3763 || reg_parm_stack_space > 0)
3764 args_size.constant += argvec[count].locate.size.constant;
3766 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3768 count++;
3771 for (; count < nargs; count++)
3773 rtx val = va_arg (p, rtx);
3774 enum machine_mode mode = va_arg (p, enum machine_mode);
3776 /* We cannot convert the arg value to the mode the library wants here;
3777 must do it earlier where we know the signedness of the arg. */
3778 if (mode == BLKmode
3779 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3780 abort ();
3782 /* On some machines, there's no way to pass a float to a library fcn.
3783 Pass it as a double instead. */
3784 #ifdef LIBGCC_NEEDS_DOUBLE
3785 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3786 val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3787 #endif
3789 /* There's no need to call protect_from_queue, because
3790 either emit_move_insn or emit_push_insn will do that. */
3792 /* Make sure it is a reasonable operand for a move or push insn. */
3793 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3794 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3795 val = force_operand (val, NULL_RTX);
3797 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3798 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3800 rtx slot;
3801 int must_copy = 1
3802 #ifdef FUNCTION_ARG_CALLEE_COPIES
3803 && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3804 NULL_TREE, 1)
3805 #endif
3808 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3809 functions, so we have to pretend this isn't such a function. */
3810 if (flags & ECF_LIBCALL_BLOCK)
3812 rtx insns = get_insns ();
3813 end_sequence ();
3814 emit_insn (insns);
3816 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3818 /* If this was a CONST function, it is now PURE since
3819 it now reads memory. */
3820 if (flags & ECF_CONST)
3822 flags &= ~ECF_CONST;
3823 flags |= ECF_PURE;
3826 if (GET_MODE (val) == MEM && ! must_copy)
3827 slot = val;
3828 else if (must_copy)
3830 slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3831 0, 1, 1);
3832 emit_move_insn (slot, val);
3834 else
3836 tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3838 slot
3839 = gen_rtx_MEM (mode,
3840 expand_expr (build1 (ADDR_EXPR,
3841 build_pointer_type (type),
3842 make_tree (type, val)),
3843 NULL_RTX, VOIDmode, 0));
3846 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3847 gen_rtx_USE (VOIDmode, slot),
3848 call_fusage);
3849 if (must_copy)
3850 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3851 gen_rtx_CLOBBER (VOIDmode,
3852 slot),
3853 call_fusage);
3855 mode = Pmode;
3856 val = force_operand (XEXP (slot, 0), NULL_RTX);
3858 #endif
3860 argvec[count].value = val;
3861 argvec[count].mode = mode;
3863 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3865 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3866 argvec[count].partial
3867 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3868 #else
3869 argvec[count].partial = 0;
3870 #endif
3872 locate_and_pad_parm (mode, NULL_TREE,
3873 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3875 #else
3876 argvec[count].reg != 0,
3877 #endif
3878 argvec[count].partial,
3879 NULL_TREE, &args_size, &argvec[count].locate);
3881 if (argvec[count].locate.size.var)
3882 abort ();
3884 if (argvec[count].reg == 0 || argvec[count].partial != 0
3885 || reg_parm_stack_space > 0)
3886 args_size.constant += argvec[count].locate.size.constant;
3888 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3891 #ifdef FINAL_REG_PARM_STACK_SPACE
3892 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3893 args_size.var);
3894 #endif
3895 /* If this machine requires an external definition for library
3896 functions, write one out. */
3897 assemble_external_libcall (fun);
3899 original_args_size = args_size;
3900 args_size.constant = (((args_size.constant
3901 + stack_pointer_delta
3902 + STACK_BYTES - 1)
3903 / STACK_BYTES
3904 * STACK_BYTES)
3905 - stack_pointer_delta);
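/* Worked example (illustrative values): with STACK_BYTES = 16,
   args_size.constant = 52 and stack_pointer_delta = 8, this yields
   ((52 + 8 + 15) / 16) * 16 - 8 = 56, so the 8 bytes already adjusted
   plus the 56-byte argument block keep the stack on a 16-byte
   boundary.  */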
3907 args_size.constant = MAX (args_size.constant,
3908 reg_parm_stack_space);
3910 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3911 args_size.constant -= reg_parm_stack_space;
3912 #endif
3914 if (args_size.constant > current_function_outgoing_args_size)
3915 current_function_outgoing_args_size = args_size.constant;
3917 if (ACCUMULATE_OUTGOING_ARGS)
3919 /* Since the stack pointer will never be pushed, it is possible for
3920 the evaluation of a parm to clobber something we have already
3921 written to the stack. Since most function calls on RISC machines
3922 do not use the stack, this is uncommon, but must work correctly.
3924 Therefore, we save any area of the stack that was already written
3925 and that we are using. Here we set up to do this by making a new
3926 stack usage map from the old one.
3928 Another approach might be to try to reorder the argument
3929 evaluations to avoid this conflicting stack usage. */
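/* stack_usage_map has one char per byte of the outgoing argument area;
   a nonzero entry means that byte has already been written by an earlier
   argument (see the marking loop after emit_push_insn below), so any such
   byte we are about to reuse must first be copied into a save area.  */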
3931 needed = args_size.constant;
3933 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3934 /* Since we will be writing into the entire argument area, the
3935 map must be allocated for its entire size, not just the part that
3936 is the responsibility of the caller. */
3937 needed += reg_parm_stack_space;
3938 #endif
3940 #ifdef ARGS_GROW_DOWNWARD
3941 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3942 needed + 1);
3943 #else
3944 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3945 needed);
3946 #endif
3947 stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3949 if (initial_highest_arg_in_use)
3950 memcpy (stack_usage_map, initial_stack_usage_map,
3951 initial_highest_arg_in_use);
3953 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3954 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3955 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3956 needed = 0;
3958 /* We must be careful to use virtual regs before they're instantiated,
3959 and real regs afterwards. Loop optimization, for example, can create
3960 new libcalls after we've instantiated the virtual regs, and if we
3961 use virtuals anyway, they won't match the rtl patterns. */
3963 if (virtuals_instantiated)
3964 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3965 else
3966 argblock = virtual_outgoing_args_rtx;
3968 else
3970 if (!PUSH_ARGS)
3971 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3974 /* If we push args individually in reverse order, perform stack alignment
3975 before the first push (the last arg). */
3976 if (argblock == 0 && PUSH_ARGS_REVERSED)
3977 anti_adjust_stack (GEN_INT (args_size.constant
3978 - original_args_size.constant));
3980 if (PUSH_ARGS_REVERSED)
3982 inc = -1;
3983 argnum = nargs - 1;
3985 else
3987 inc = 1;
3988 argnum = 0;
3991 #ifdef REG_PARM_STACK_SPACE
3992 if (ACCUMULATE_OUTGOING_ARGS)
3994 /* The argument list is the property of the called routine and it
3995 may clobber it. If the fixed area has been used for previous
3996 parameters, we must save and restore it. */
3997 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3998 &low_to_save, &high_to_save);
4000 #endif
4002 /* Push the args that need to be pushed. */
4004 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4005 are to be pushed. */
4006 for (count = 0; count < nargs; count++, argnum += inc)
4008 enum machine_mode mode = argvec[argnum].mode;
4009 rtx val = argvec[argnum].value;
4010 rtx reg = argvec[argnum].reg;
4011 int partial = argvec[argnum].partial;
4012 int lower_bound = 0, upper_bound = 0, i;
4014 if (! (reg != 0 && partial == 0))
4016 if (ACCUMULATE_OUTGOING_ARGS)
4018 /* If this is being stored into a pre-allocated, fixed-size
4019 stack area, save any previous data at that location. */
4021 #ifdef ARGS_GROW_DOWNWARD
4022 /* stack_slot is negative, but we want to index stack_usage_map
4023 with positive values. */
4024 upper_bound = -argvec[argnum].locate.offset.constant + 1;
4025 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
4026 #else
4027 lower_bound = argvec[argnum].locate.offset.constant;
4028 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
4029 #endif
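/* Illustrative example: with ARGS_GROW_DOWNWARD, an argument at offset
   -8 with a 4-byte size maps to stack_usage_map indices [5, 9)
   (upper_bound = -(-8) + 1 = 9, lower_bound = 9 - 4 = 5); without it,
   the same 4-byte argument at offset 8 maps to [8, 12).  */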
4031 i = lower_bound;
4032 /* Don't worry about things in the fixed argument area;
4033 it has already been saved. */
4034 if (i < reg_parm_stack_space)
4035 i = reg_parm_stack_space;
4036 while (i < upper_bound && stack_usage_map[i] == 0)
4037 i++;
4039 if (i < upper_bound)
4041 /* We need to make a save area. */
4042 unsigned int size
4043 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
4044 enum machine_mode save_mode
4045 = mode_for_size (size, MODE_INT, 1);
4046 rtx adr
4047 = plus_constant (argblock,
4048 argvec[argnum].locate.offset.constant);
4049 rtx stack_area
4050 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
4051 argvec[argnum].save_area = gen_reg_rtx (save_mode);
4053 emit_move_insn (argvec[argnum].save_area, stack_area);
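/* The save area is a pseudo register rather than memory: for a 4-byte
   slot, mode_for_size typically yields SImode here, so the bytes already
   pushed are parked in a pseudo while the remaining arguments are
   evaluated, and copied back in the restore loop near the end of this
   function.  */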
4057 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4058 partial, reg, 0, argblock,
4059 GEN_INT (argvec[argnum].locate.offset.constant),
4060 reg_parm_stack_space,
4061 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
4063 /* Now mark the segment we just used. */
4064 if (ACCUMULATE_OUTGOING_ARGS)
4065 for (i = lower_bound; i < upper_bound; i++)
4066 stack_usage_map[i] = 1;
4068 NO_DEFER_POP;
4072 /* If we pushed args in forward order, perform stack alignment
4073 after pushing the last arg. */
4074 if (argblock == 0 && !PUSH_ARGS_REVERSED)
4075 anti_adjust_stack (GEN_INT (args_size.constant
4076 - original_args_size.constant));
4078 if (PUSH_ARGS_REVERSED)
4079 argnum = nargs - 1;
4080 else
4081 argnum = 0;
4083 fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4085 /* Now load any reg parms into their regs. */
4087 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4088 are to be pushed. */
4089 for (count = 0; count < nargs; count++, argnum += inc)
4091 rtx val = argvec[argnum].value;
4092 rtx reg = argvec[argnum].reg;
4093 int partial = argvec[argnum].partial;
4095 /* Handle calls that pass values in multiple non-contiguous
4096 locations. The PA64 has examples of this for library calls. */
4097 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4098 emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
4099 else if (reg != 0 && partial == 0)
4100 emit_move_insn (reg, val);
4102 NO_DEFER_POP;
4105 /* Any regs containing parms remain in use through the call. */
4106 for (count = 0; count < nargs; count++)
4108 rtx reg = argvec[count].reg;
4109 if (reg != 0 && GET_CODE (reg) == PARALLEL)
4110 use_group_regs (&call_fusage, reg);
4111 else if (reg != 0)
4112 use_reg (&call_fusage, reg);
4115 /* Pass the function the address in which to return a structure value. */
4116 if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4118 emit_move_insn (struct_value_rtx,
4119 force_reg (Pmode,
4120 force_operand (XEXP (mem_value, 0),
4121 NULL_RTX)));
4122 if (GET_CODE (struct_value_rtx) == REG)
4123 use_reg (&call_fusage, struct_value_rtx);
4126 /* Don't allow popping to be deferred, since then
4127 cse'ing of library calls could delete a call and leave the pop. */
4128 NO_DEFER_POP;
4129 valreg = (mem_value == 0 && outmode != VOIDmode
4130 ? hard_libcall_value (outmode) : NULL_RTX);
4132 /* Stack must be properly aligned now. */
4133 if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4134 abort ();
4136 before_call = get_last_insn ();
4138 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4139 will set inhibit_defer_pop to that value. */
4140 /* The return type is needed to decide how many bytes the function pops.
4141 Signedness plays no role in that, so for simplicity, we pretend it's
4142 always signed. We also assume that the list of arguments passed has
4143 no impact, so we pretend it is unknown. */
4145 emit_call_1 (fun,
4146 get_identifier (XSTR (orgfun, 0)),
4147 build_function_type (tfom, NULL_TREE),
4148 original_args_size.constant, args_size.constant,
4149 struct_value_size,
4150 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4151 valreg,
4152 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4154 /* For calls to `setjmp', etc., inform flow.c it should complain
4155 if nonvolatile values are live. For functions that cannot return,
4156 inform flow that control does not fall through. */
4158 if (flags & (ECF_NORETURN | ECF_LONGJMP))
4160 /* The barrier note must be emitted
4161 immediately after the CALL_INSN. Some ports emit more than
4162 just a CALL_INSN above, so we must search for it here. */
4164 rtx last = get_last_insn ();
4165 while (GET_CODE (last) != CALL_INSN)
4167 last = PREV_INSN (last);
4168 /* There was no CALL_INSN? */
4169 if (last == before_call)
4170 abort ();
4173 emit_barrier_after (last);
4176 /* Now restore inhibit_defer_pop to its actual original value. */
4177 OK_DEFER_POP;
4179 /* If call is cse'able, make appropriate pair of reg-notes around it.
4180 Test valreg so we don't crash; may safely ignore `const'
4181 if return type is void. Disable for PARALLEL return values, because
4182 we have no way to move such values into a pseudo register. */
4183 if (flags & ECF_LIBCALL_BLOCK)
4185 rtx insns;
4187 if (valreg == 0)
4189 insns = get_insns ();
4190 end_sequence ();
4191 emit_insn (insns);
4193 else
4195 rtx note = 0;
4196 rtx temp;
4197 int i;
4199 if (GET_CODE (valreg) == PARALLEL)
4201 temp = gen_reg_rtx (outmode);
4202 emit_group_store (temp, valreg, outmode);
4203 valreg = temp;
4206 temp = gen_reg_rtx (GET_MODE (valreg));
4208 /* Construct an "equal form" for the value which mentions all the
4209 arguments in order as well as the function name. */
4210 for (i = 0; i < nargs; i++)
4211 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4212 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4214 insns = get_insns ();
4215 end_sequence ();
4217 if (flags & ECF_PURE)
4218 note = gen_rtx_EXPR_LIST (VOIDmode,
4219 gen_rtx_USE (VOIDmode,
4220 gen_rtx_MEM (BLKmode,
4221 gen_rtx_SCRATCH (VOIDmode))),
4222 note);
4224 emit_libcall_block (insns, temp, valreg, note);
4226 valreg = temp;
4229 pop_temp_slots ();
4231 /* Copy the value to the right place. */
4232 if (outmode != VOIDmode && retval)
4234 if (mem_value)
4236 if (value == 0)
4237 value = mem_value;
4238 if (value != mem_value)
4239 emit_move_insn (value, mem_value);
4241 else if (GET_CODE (valreg) == PARALLEL)
4243 if (value == 0)
4244 value = gen_reg_rtx (outmode);
4245 emit_group_store (value, valreg, outmode);
4247 else if (value != 0)
4248 emit_move_insn (value, valreg);
4249 else
4250 value = valreg;
4253 if (ACCUMULATE_OUTGOING_ARGS)
4255 #ifdef REG_PARM_STACK_SPACE
4256 if (save_area)
4257 restore_fixed_argument_area (save_area, argblock,
4258 high_to_save, low_to_save);
4259 #endif
4261 /* If we saved any argument areas, restore them. */
4262 for (count = 0; count < nargs; count++)
4263 if (argvec[count].save_area)
4265 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4266 rtx adr = plus_constant (argblock,
4267 argvec[count].locate.offset.constant);
4268 rtx stack_area = gen_rtx_MEM (save_mode,
4269 memory_address (save_mode, adr));
4271 emit_move_insn (stack_area, argvec[count].save_area);
4274 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4275 stack_usage_map = initial_stack_usage_map;
4278 return value;
4282 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4283 (emitting the queue unless NO_QUEUE is nonzero),
4284 for a value of mode OUTMODE,
4285 with NARGS different arguments, passed as alternating rtx values
4286 and machine_modes to convert them to.
4287 The rtx values should have been passed through protect_from_queue already.
4289 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4290 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4291 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4292 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
4293 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
4294 or other LCT_ value for other types of library calls. */
4296 void
4297 emit_library_call (rtx orgfun, enum libcall_type fn_type,
4298 enum machine_mode outmode, int nargs, ...)
4300 va_list p;
4302 va_start (p, nargs);
4303 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4304 va_end (p);
4307 /* Like emit_library_call except that an extra argument, VALUE,
4308 comes second and says where to store the result.
4309 (If VALUE is zero, this function chooses a convenient way
4310 to return the value.)
4312 This function returns an rtx for where the value is to be found.
4313 If VALUE is nonzero, VALUE is returned. */
4315 rtx
4316 emit_library_call_value (rtx orgfun, rtx value,
4317 enum libcall_type fn_type,
4318 enum machine_mode outmode, int nargs, ...)
4320 rtx result;
4321 va_list p;
4323 va_start (p, nargs);
4324 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4325 nargs, p);
4326 va_end (p);
4328 return result;
4331 /* Store a single argument for a function call
4332 into the register or memory area where it must be passed.
4333 *ARG describes the argument value and where to pass it.
4335 ARGBLOCK is the address of the stack-block for all the arguments,
4336 or 0 on a machine where arguments are pushed individually.
4338 MAY_BE_ALLOCA nonzero says this could be a call to `alloca',
4339 so we must be careful about how the stack is used.
4341 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4342 argument stack. This is used with ACCUMULATE_OUTGOING_ARGS to indicate
4343 that we need not worry about saving and restoring the stack.
4345 FNDECL is the declaration of the function we are calling.
4347 Return nonzero if this arg should cause sibcall failure,
4348 zero otherwise. */
4350 static int
4351 store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
4352 struct arg_data *arg;
4353 rtx argblock;
4354 int flags;
4355 int variable_size ATTRIBUTE_UNUSED;
4356 int reg_parm_stack_space;
4358 tree pval = arg->tree_value;
4359 rtx reg = 0;
4360 int partial = 0;
4361 int used = 0;
4362 int i, lower_bound = 0, upper_bound = 0;
4363 int sibcall_failure = 0;
4365 if (TREE_CODE (pval) == ERROR_MARK)
4366 return 1;
4368 /* Push a new temporary level for any temporaries we make for
4369 this argument. */
4370 push_temp_slots ();
4372 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4374 /* If this is being stored into a pre-allocated, fixed-size stack area,
4375 save any previous data at that location. */
4376 if (argblock && ! variable_size && arg->stack)
4378 #ifdef ARGS_GROW_DOWNWARD
4379 /* stack_slot is negative, but we want to index stack_usage_map
4380 with positive values. */
4381 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4382 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4383 else
4384 upper_bound = 0;
4386 lower_bound = upper_bound - arg->locate.size.constant;
4387 #else
4388 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4389 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4390 else
4391 lower_bound = 0;
4393 upper_bound = lower_bound + arg->locate.size.constant;
4394 #endif
4396 i = lower_bound;
4397 /* Don't worry about things in the fixed argument area;
4398 it has already been saved. */
4399 if (i < reg_parm_stack_space)
4400 i = reg_parm_stack_space;
4401 while (i < upper_bound && stack_usage_map[i] == 0)
4402 i++;
4404 if (i < upper_bound)
4406 /* We need to make a save area. */
4407 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4408 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4409 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4410 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4412 if (save_mode == BLKmode)
4414 tree ot = TREE_TYPE (arg->tree_value);
4415 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4416 | TYPE_QUAL_CONST));
4418 arg->save_area = assign_temp (nt, 0, 1, 1);
4419 preserve_temp_slots (arg->save_area);
4420 emit_block_move (validize_mem (arg->save_area), stack_area,
4421 expr_size (arg->tree_value),
4422 BLOCK_OP_CALL_PARM);
4424 else
4426 arg->save_area = gen_reg_rtx (save_mode);
4427 emit_move_insn (arg->save_area, stack_area);
4433 /* If this isn't going to be placed on both the stack and in registers,
4434 set up the register and number of words. */
4435 if (! arg->pass_on_stack)
4437 if (flags & ECF_SIBCALL)
4438 reg = arg->tail_call_reg;
4439 else
4440 reg = arg->reg;
4441 partial = arg->partial;
4444 if (reg != 0 && partial == 0)
4445 /* Being passed entirely in a register. We shouldn't be called in
4446 this case. */
4447 abort ();
4449 /* If this arg needs special alignment, don't load the registers
4450 here. */
4451 if (arg->n_aligned_regs != 0)
4452 reg = 0;
4454 /* If this is being passed partially in a register, we can't evaluate
4455 it directly into its stack slot. Otherwise, we can. */
4456 if (arg->value == 0)
4458 /* stack_arg_under_construction is nonzero if a function argument is
4459 being evaluated directly into the outgoing argument list and
4460 expand_call must take special action to preserve the argument list
4461 if it is called recursively.
4463 For scalar function arguments stack_usage_map is sufficient to
4464 determine which stack slots must be saved and restored. Scalar
4465 arguments in general have pass_on_stack == 0.
4467 If this argument is initialized by a function which takes the
4468 address of the argument (a C++ constructor or a C function
4469 returning a BLKmode structure), then stack_usage_map is
4470 insufficient and expand_call must push the stack around the
4471 function call. Such arguments have pass_on_stack == 1.
4473 Note that it is always safe to set stack_arg_under_construction,
4474 but this generates suboptimal code if set when not needed. */
4476 if (arg->pass_on_stack)
4477 stack_arg_under_construction++;
4479 arg->value = expand_expr (pval,
4480 (partial
4481 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4482 ? NULL_RTX : arg->stack,
4483 VOIDmode, EXPAND_STACK_PARM);
4485 /* If the mode doesn't agree (because we are promoting the object,
4486 or for any other reason), convert the mode. */
4488 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4489 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4490 arg->value, arg->unsignedp);
4492 if (arg->pass_on_stack)
4493 stack_arg_under_construction--;
4496 /* Don't allow anything left on stack from computation
4497 of argument to alloca. */
4498 if (flags & ECF_MAY_BE_ALLOCA)
4499 do_pending_stack_adjust ();
4501 if (arg->value == arg->stack)
4502 /* If the value is already in the stack slot, we are done. */
4504 else if (arg->mode != BLKmode)
4506 int size;
4508 /* Argument is a scalar, not entirely passed in registers.
4509 (If part is passed in registers, arg->partial says how much
4510 and emit_push_insn will take care of putting it there.)
4512 Push it, and if its size is less than the
4513 amount of space allocated to it,
4514 also bump stack pointer by the additional space.
4515 Note that in C the default argument promotions
4516 will prevent such mismatches. */
4518 size = GET_MODE_SIZE (arg->mode);
4519 /* Compute how much space the push instruction will push.
4520 On many machines, pushing a byte will advance the stack
4521 pointer by a halfword. */
4522 #ifdef PUSH_ROUNDING
4523 size = PUSH_ROUNDING (size);
4524 #endif
4525 used = size;
4527 /* Compute how much space the argument should get:
4528 round up to a multiple of the alignment for arguments. */
4529 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4530 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4531 / (PARM_BOUNDARY / BITS_PER_UNIT))
4532 * (PARM_BOUNDARY / BITS_PER_UNIT));
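/* For example, with a 32-bit PARM_BOUNDARY a 1-byte argument still
   occupies a full 4-byte slot: used = ((1 + 3) / 4) * 4 = 4, and the
   extra used - size bytes are passed to emit_push_insn below so the
   stack pointer is bumped by the full slot.  */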
4534 /* This isn't already where we want it on the stack, so put it there.
4535 This can either be done with push or copy insns. */
4536 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4537 PARM_BOUNDARY, partial, reg, used - size, argblock,
4538 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4539 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4541 /* Unless this is a partially-in-register argument, the argument is now
4542 in the stack. */
4543 if (partial == 0)
4544 arg->value = arg->stack;
4546 else
4548 /* BLKmode, at least partly to be pushed. */
4550 unsigned int parm_align;
4551 int excess;
4552 rtx size_rtx;
4554 /* Pushing a nonscalar.
4555 If part is passed in registers, PARTIAL says how much
4556 and emit_push_insn will take care of putting it there. */
4558 /* Round its size up to a multiple
4559 of the allocation unit for arguments. */
4561 if (arg->locate.size.var != 0)
4563 excess = 0;
4564 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4566 else
4568 /* PUSH_ROUNDING has no effect on us, because
4569 emit_push_insn for BLKmode is careful to avoid it. */
4570 excess = (arg->locate.size.constant
4571 - int_size_in_bytes (TREE_TYPE (pval))
4572 + partial * UNITS_PER_WORD);
4573 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4574 NULL_RTX, TYPE_MODE (sizetype), 0);
4577 /* Some types will require stricter alignment, which will be
4578 provided for elsewhere in argument layout. */
4579 parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4581 /* When an argument is padded down, the block is aligned to
4582 PARM_BOUNDARY, but the actual argument isn't. */
4583 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4585 if (arg->locate.size.var)
4586 parm_align = BITS_PER_UNIT;
4587 else if (excess)
4589 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4590 parm_align = MIN (parm_align, excess_align);
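/* excess & -excess isolates the lowest set bit of the padding amount,
   i.e. the largest power of two that divides it.  For example, excess = 6
   (binary 110) gives 2, so excess_align = 16 bits and the argument itself
   can only be assumed 16-bit aligned even though its slot is aligned to
   PARM_BOUNDARY.  */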
4594 if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4596 /* emit_push_insn might not work properly if arg->value and
4597 argblock + arg->locate.offset areas overlap. */
4598 rtx x = arg->value;
4599 int i = 0;
4601 if (XEXP (x, 0) == current_function_internal_arg_pointer
4602 || (GET_CODE (XEXP (x, 0)) == PLUS
4603 && XEXP (XEXP (x, 0), 0) ==
4604 current_function_internal_arg_pointer
4605 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4607 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4608 i = INTVAL (XEXP (XEXP (x, 0), 1));
4610 /* expand_call should ensure this */
4611 if (arg->locate.offset.var || GET_CODE (size_rtx) != CONST_INT)
4612 abort ();
4614 if (arg->locate.offset.constant > i)
4616 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4617 sibcall_failure = 1;
4619 else if (arg->locate.offset.constant < i)
4621 if (i < arg->locate.offset.constant + INTVAL (size_rtx))
4622 sibcall_failure = 1;
4627 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4628 parm_align, partial, reg, excess, argblock,
4629 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4630 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4632 /* Unless this is a partially-in-register argument, the argument is now
4633 in the stack.
4635 ??? Unlike the case above, in which we want the actual
4636 address of the data, so that we can load it directly into a
4637 register, here we want the address of the stack slot, so that
4638 it's properly aligned for word-by-word copying or something
4639 like that. It's not clear that this is always correct. */
4640 if (partial == 0)
4641 arg->value = arg->stack_slot;
4644 /* Mark all slots this store used. */
4645 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4646 && argblock && ! variable_size && arg->stack)
4647 for (i = lower_bound; i < upper_bound; i++)
4648 stack_usage_map[i] = 1;
4650 /* Once we have pushed something, pops can't safely
4651 be deferred during the rest of the arguments. */
4652 NO_DEFER_POP;
4654 /* ANSI doesn't require a sequence point here,
4655 but PCC has one, so this will avoid some problems. */
4656 emit_queue ();
4658 /* Free any temporary slots made in processing this argument. Show
4659 that we might have taken the address of something and pushed that
4660 as an operand. */
4661 preserve_temp_slots (NULL_RTX);
4662 free_temp_slots ();
4663 pop_temp_slots ();
4665 return sibcall_failure;
4668 /* Nonzero if we do not know how to pass TYPE solely in registers.
4669 We cannot do so in the following cases:
4671 - if the type has variable size
4672 - if the type is marked as addressable (it is required to be constructed
4673 into the stack)
4674 - if the padding and mode of the type is such that a copy into a register
4675 would put it into the wrong part of the register.
4677 Which padding can't be supported depends on the byte endianness.
4679 A value in a register is implicitly padded at the most significant end.
4680 On a big-endian machine, that is the lower end in memory.
4681 So a value padded in memory at the upper end can't go in a register.
4682 For a little-endian machine, the reverse is true. */
4684 bool
4685 default_must_pass_in_stack (mode, type)
4686 enum machine_mode mode;
4687 tree type;
4689 if (!type)
4690 return false;
4692 /* If the type has variable size... */
4693 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4694 return true;
4696 /* If the type is marked as addressable (it is required
4697 to be constructed into the stack)... */
4698 if (TREE_ADDRESSABLE (type))
4699 return true;
4701 /* If the padding and mode of the type is such that a copy into
4702 a register would put it into the wrong part of the register. */
4703 if (mode == BLKmode
4704 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4705 && (FUNCTION_ARG_PADDING (mode, type)
4706 == (BYTES_BIG_ENDIAN ? upward : downward)))
4707 return true;
4709 return false;
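/* A self-contained sketch (not part of calls.c; the constants 16 and 4
   stand in for STACK_BYTES and PARM_BOUNDARY / BITS_PER_UNIT on a
   hypothetical 32-bit target) restating the two rounding computations
   used in this file, so the arithmetic can be checked in isolation.  */

#include <assert.h>

/* Round the argument block so that the pending stack adjustment plus the
   block itself stays a multiple of the preferred stack boundary, as done
   after scanning the libcall arguments above.  */
static int
sketch_round_arg_block (int args_size, int stack_pointer_delta)
{
  return ((args_size + stack_pointer_delta + 16 - 1) / 16 * 16)
         - stack_pointer_delta;
}

/* Round one scalar argument's pushed size up to the parameter alignment
   unit, as done in store_one_arg above.  */
static int
sketch_round_parm (int size)
{
  return (size + 4 - 1) / 4 * 4;
}

int
main (void)
{
  /* 20 bytes of arguments with 4 bytes already adjusted: pad to 28 so
     that 28 + 4 is a multiple of 16.  */
  assert (sketch_round_arg_block (20, 4) == 28);
  /* A 6-byte argument still occupies two full 4-byte parameter slots.  */
  assert (sketch_round_parm (6) == 8);
  return 0;
}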