/* Convert function calls to rtl insns, for GNU C compiler.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "libfuncs.h"
#include "function.h"
#include "regs.h"
#include "toplev.h"
#include "output.h"
#include "tm_p.h"
#include "timevar.h"
#include "sbitmap.h"
#include "langhooks.h"
#include "target.h"
#include "cgraph.h"
#include "except.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED PUSH_ARGS
#endif
#endif

#endif

#ifndef PUSH_ARGS_REVERSED
#define PUSH_ARGS_REVERSED 0
#endif
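
/* For example, on a target where the stack grows downward, args grow
   upward, and push insns exist, PUSH_ARGS_REVERSED is PUSH_ARGS:
   arguments are pushed last-to-first so the first argument ends up at
   the lowest address, nearest the stack pointer.  */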

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET    0
#endif

/* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
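
/* E.g., with a PREFERRED_STACK_BOUNDARY of 64 bits and 8-bit units,
   STACK_BYTES is 8.  */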

/* Data structure and subroutines used within expand_call.  */

struct arg_data
{
  /* Tree node for this argument.  */
  tree tree_value;
  /* Mode for value; TYPE_MODE unless promoted.  */
  enum machine_mode mode;
  /* Current RTL value for argument, or 0 if it isn't precomputed.  */
  rtx value;
  /* Initially-computed RTL value for argument; only for const functions.  */
  rtx initial_value;
  /* Register to pass this argument in, 0 if passed on stack, or a
     PARALLEL if the arg is to be copied into multiple non-contiguous
     registers.  */
  rtx reg;
  /* Register to pass this argument in when generating tail call sequence.
     This is not the same register as for normal calls on machines with
     register windows.  */
  rtx tail_call_reg;
  /* If REG was promoted from the actual mode of the argument expression,
     indicates whether the promotion is sign- or zero-extended.  */
  int unsignedp;
  /* Number of registers to use.  0 means put the whole arg in registers.
     Also 0 if not passed in registers.  */
  int partial;
  /* Nonzero if argument must be passed on stack.
     Note that some arguments may be passed on the stack
     even though pass_on_stack is zero, just because FUNCTION_ARG says so.
     pass_on_stack identifies arguments that *cannot* go in registers.  */
  int pass_on_stack;
  /* Offset of this argument from beginning of stack-args.  */
  struct args_size offset;
  /* Similar, but offset to the start of the stack slot.  Different from
     OFFSET if this arg pads downward.  */
  struct args_size slot_offset;
  /* Size of this argument on the stack, rounded up for any padding it
     gets; parts of the argument passed in registers do not count.
     If REG_PARM_STACK_SPACE is defined, then register parms
     are counted here as well.  */
  struct args_size size;
  /* Location on the stack at which parameter should be stored.  The store
     has already been done if STACK == VALUE.  */
  rtx stack;
  /* Location on the stack of the start of this argument slot.  This can
     differ from STACK if this arg pads downward.  This location is known
     to be aligned to FUNCTION_ARG_BOUNDARY.  */
  rtx stack_slot;
  /* Place that this stack area has been saved, if needed.  */
  rtx save_area;
  /* If an argument's alignment does not permit direct copying into registers,
     copy in smaller-sized pieces into pseudos.  These are stored in a
     block pointed to by this field.  The next field says how many
     word-sized pseudos we made.  */
  rtx *aligned_regs;
  int n_aligned_regs;
  /* The amount that the stack pointer needs to be adjusted to
     force alignment for the next argument.  */
  struct args_size alignment_pad;
};
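
/* For example, on a target that passes the first few words of arguments
   in registers, a structure straddling that boundary would have REG set,
   PARTIAL equal to the number of words that fit in registers, and the
   remainder described by OFFSET and SIZE on the stack.  (Illustrative
   only; the exact split is whatever FUNCTION_ARG_PARTIAL_NREGS says.)  */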

/* A vector of one char per byte of stack space.  A byte is nonzero if
   the corresponding stack location has been used.
   This vector is used to prevent a function call within an argument from
   clobbering any stack already set up.  */
static char *stack_usage_map;

/* Size of STACK_USAGE_MAP.  */
static int highest_outgoing_arg_in_use;

/* A bitmap of virtual-incoming stack space.  A bit is set if the
   corresponding stack location's tail call argument has already been
   stored into the stack.  This bitmap is used to prevent sibling call
   optimization if the function tries to use its parent's incoming
   argument slots when they have already been overwritten with tail call
   arguments.  */
static sbitmap stored_args_map;

/* stack_arg_under_construction is nonzero when an argument may be
   initialized with a constructor call (including a C function that
   returns a BLKmode struct) and expand_call must take special action
   to make sure the object being constructed does not overlap the
   argument list for the constructor call.  */
int stack_arg_under_construction;

static int calls_function	PARAMS ((tree, int));
static int calls_function_1	PARAMS ((tree, int));

static void emit_call_1		PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
					 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
					 rtx, int, rtx, int,
					 CUMULATIVE_ARGS *));
static void precompute_register_parameters	PARAMS ((int,
							 struct arg_data *,
							 int *));
static int store_one_arg	PARAMS ((struct arg_data *, rtx, int, int,
					 int));
static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
							     int));
static int finalize_must_preallocate		PARAMS ((int, int,
							 struct arg_data *,
							 struct args_size *));
static void precompute_arguments		PARAMS ((int, int,
							 struct arg_data *));
static int compute_argument_block_size		PARAMS ((int,
							 struct args_size *,
							 int));
static void initialize_argument_information	PARAMS ((int,
							 struct arg_data *,
							 struct args_size *,
							 int, tree, tree,
							 CUMULATIVE_ARGS *,
							 int, rtx *, int *,
							 int *, int *));
static void compute_argument_addresses		PARAMS ((struct arg_data *,
							 rtx, int));
static rtx rtx_for_function_call		PARAMS ((tree, tree));
static void load_register_parameters		PARAMS ((struct arg_data *,
							 int, rtx *, int,
							 int, int *));
static rtx emit_library_call_value_1		PARAMS ((int, rtx, rtx,
							 enum libcall_type,
							 enum machine_mode,
							 int, va_list));
static int special_function_p			PARAMS ((tree, int));
static rtx try_to_integrate			PARAMS ((tree, tree, rtx,
							 int, tree, rtx));
static int check_sibcall_argument_overlap_1	PARAMS ((rtx));
static int check_sibcall_argument_overlap	PARAMS ((rtx, struct arg_data *,
							 int));

static int combine_pending_stack_adjustment_and_call
						PARAMS ((int, struct args_size *, int));
static tree fix_unsafe_tree			PARAMS ((tree));

#ifdef REG_PARM_STACK_SPACE
static rtx save_fixed_argument_area	PARAMS ((int, rtx, int *, int *));
static void restore_fixed_argument_area	PARAMS ((rtx, rtx, int, int));
#endif

/* If WHICH is 1, return 1 if EXP contains a call to the built-in function
   `alloca'.

   If WHICH is 0, return 1 if EXP contains a call to any function.

   Actually, we only need return 1 if evaluating EXP would require pushing
   arguments on the stack, but that is too difficult to compute, so we just
   assume any function call might require the stack.  */

static tree calls_function_save_exprs;

static int
calls_function (exp, which)
     tree exp;
     int which;
{
  int val;

  calls_function_save_exprs = 0;
  val = calls_function_1 (exp, which);
  calls_function_save_exprs = 0;
  return val;
}

/* Recursive function to do the work of above function.  */

static int
calls_function_1 (exp, which)
     tree exp;
     int which;
{
  int i;
  enum tree_code code = TREE_CODE (exp);
  int class = TREE_CODE_CLASS (code);
  int length = first_rtl_op (code);

  /* If this code is language-specific, we don't know what it will do.  */
  if ((int) code >= NUM_TREE_CODES)
    return 1;

  switch (code)
    {
    case CALL_EXPR:
      if (which == 0)
	return 1;
      else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== FUNCTION_TYPE)
	       && (TYPE_RETURNS_STACK_DEPRESSED
		   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	return 1;
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		   == FUNCTION_DECL)
	       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				       0)
		   & ECF_MAY_BE_ALLOCA))
	return 1;

      break;

    case CONSTRUCTOR:
      {
	tree tem;

	for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
	  if (calls_function_1 (TREE_VALUE (tem), which))
	    return 1;
      }

      return 0;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return 0;
      if (value_member (exp, calls_function_save_exprs))
	return 0;
      calls_function_save_exprs = tree_cons (NULL_TREE, exp,
					     calls_function_save_exprs);
      return (TREE_OPERAND (exp, 0) != 0
	      && calls_function_1 (TREE_OPERAND (exp, 0), which));

    case BLOCK:
      {
	tree local;
	tree subblock;

	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
	  if (DECL_INITIAL (local) != 0
	      && calls_function_1 (DECL_INITIAL (local), which))
	    return 1;

	for (subblock = BLOCK_SUBBLOCKS (exp);
	     subblock;
	     subblock = TREE_CHAIN (subblock))
	  if (calls_function_1 (subblock, which))
	    return 1;
      }
      return 0;

    case TREE_LIST:
      for (; exp != 0; exp = TREE_CHAIN (exp))
	if (calls_function_1 (TREE_VALUE (exp), which))
	  return 1;
      return 0;

    default:
      break;
    }

  /* Only expressions, references, and blocks can contain calls.  */
  if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
    return 0;

  for (i = 0; i < length; i++)
    if (TREE_OPERAND (exp, i) != 0
	&& calls_function_1 (TREE_OPERAND (exp, i), which))
      return 1;

  return 0;
}
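
/* Thus calls_function (exp, 0) detects any call at all within EXP, while
   calls_function (exp, 1) detects only calls that can touch the stack
   arguments being set up: alloca-like functions and functions that
   return with the stack pointer depressed.  */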

/* Force FUNEXP into a form suitable for the address of a CALL,
   and return that as an rtx.  Also load the static chain register
   if FNDECL is a nested function.

   CALL_FUSAGE points to a variable holding the prospective
   CALL_INSN_FUNCTION_USAGE information.  */

rtx
prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
     rtx funexp;
     tree fndecl;
     rtx *call_fusage;
     int reg_parm_seen;
     int sibcallp;
{
  rtx static_chain_value = 0;

  funexp = protect_from_queue (funexp, 0);

  if (fndecl != 0)
    /* Get possible static chain value for nested function in C.  */
    static_chain_value = lookup_static_chain (fndecl);

  /* Make a valid memory address and copy constants thru pseudo-regs,
     but not for a constant address if -fno-function-cse.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    /* If we are using registers for parameters, force the
       function address into a register now.  */
    funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
	      : memory_address (FUNCTION_MODE, funexp));
  else if (! sibcallp)
    {
#ifndef NO_FUNCTION_CSE
      if (optimize && ! flag_no_function_cse)
#ifdef NO_RECURSIVE_FUNCTION_CSE
	if (fndecl != current_function_decl)
#endif
	  funexp = force_reg (Pmode, funexp);
#endif
    }

  if (static_chain_value != 0)
    {
      emit_move_insn (static_chain_rtx, static_chain_value);

      if (GET_CODE (static_chain_rtx) == REG)
	use_reg (call_fusage, static_chain_rtx);
    }

  return funexp;
}

/* Generate instructions to call function FUNEXP,
   and optionally pop the results.
   The CALL_INSN is the first insn generated.

   FNDECL is the declaration node of the function.  This is given to the
   macro RETURN_POPS_ARGS to determine whether this function pops its own args.

   FUNTYPE is the data type of the function.  This is given to the macro
   RETURN_POPS_ARGS to determine whether this function pops its own args.
   We used to allow an identifier for library functions, but that doesn't
   work when the return type is an aggregate type and the calling convention
   says that the pointer to this aggregate is to be popped by the callee.

   STACK_SIZE is the number of bytes of arguments on the stack,
   ROUNDED_STACK_SIZE is that number rounded up to
   PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
   both to put into the call insn and to generate explicit popping
   code if necessary.

   STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
   It is zero if this call doesn't want a structure value.

   NEXT_ARG_REG is the rtx that results from executing
     FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
   just after all the args have had their registers assigned.
   This could be whatever you like, but normally it is the first
   arg-register beyond those used for args in this call,
   or 0 if all the arg-registers are used in this call.
   It is passed on to `gen_call' so you can put this info in the call insn.

   VALREG is a hard register in which a value is returned,
   or 0 if the call does not return a value.

   OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
   the args to this call were processed.
   We restore `inhibit_defer_pop' to that value.

   CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
   denote registers used by the called function.  */

static void
emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
	     struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
	     call_fusage, ecf_flags, args_so_far)
     rtx funexp;
     tree fndecl ATTRIBUTE_UNUSED;
     tree funtype ATTRIBUTE_UNUSED;
     HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
     HOST_WIDE_INT rounded_stack_size;
     HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
     rtx next_arg_reg ATTRIBUTE_UNUSED;
     rtx valreg;
     int old_inhibit_defer_pop;
     rtx call_fusage;
     int ecf_flags;
     CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
{
  rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
  rtx call_insn;
  int already_popped = 0;
  HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
#if defined (HAVE_call) && defined (HAVE_call_value)
  rtx struct_value_size_rtx;
  struct_value_size_rtx = GEN_INT (struct_value_size);
#endif

#ifdef CALL_POPS_ARGS
  n_popped += CALL_POPS_ARGS (* args_so_far);
#endif

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (funexp) != SYMBOL_REF)
    funexp = memory_address (FUNCTION_MODE, funexp);

#if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall_pop && HAVE_sibcall_value_pop
      && (n_popped > 0 || stack_size == 0))
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_SIBCALL_VALUE_POP (valreg,
				     gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     n_pop);
      else
	pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			       rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
  /* If the target has "call" or "call_value" insns, then prefer them
     if no arguments are actually popped.  If the target does not have
     "call" or "call_value" insns, then we must use the popping versions
     even if the call has no arguments to pop.  */
#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
      && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
#else
  if (HAVE_call_pop && HAVE_call_value_pop)
#endif
    {
      rtx n_pop = GEN_INT (n_popped);
      rtx pat;

      /* If this subroutine pops its own args, record that in the call insn
	 if possible, for the sake of frame pointer elimination.  */

      if (valreg)
	pat = GEN_CALL_VALUE_POP (valreg,
				  gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg, n_pop);
      else
	pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
			    rounded_stack_size_rtx, next_arg_reg, n_pop);

      emit_call_insn (pat);
      already_popped = 1;
    }
  else
#endif

#if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
  if ((ecf_flags & ECF_SIBCALL)
      && HAVE_sibcall && HAVE_sibcall_value)
    {
      if (valreg)
	emit_call_insn (GEN_SIBCALL_VALUE (valreg,
					   gen_rtx_MEM (FUNCTION_MODE, funexp),
					   rounded_stack_size_rtx,
					   next_arg_reg, NULL_RTX));
      else
	emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				     rounded_stack_size_rtx, next_arg_reg,
				     struct_value_size_rtx));
    }
  else
#endif

#if defined (HAVE_call) && defined (HAVE_call_value)
  if (HAVE_call && HAVE_call_value)
    {
      if (valreg)
	emit_call_insn (GEN_CALL_VALUE (valreg,
					gen_rtx_MEM (FUNCTION_MODE, funexp),
					rounded_stack_size_rtx, next_arg_reg,
					NULL_RTX));
      else
	emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
				  rounded_stack_size_rtx, next_arg_reg,
				  struct_value_size_rtx));
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Mark memory as used for "pure" function call.  */
  if (ecf_flags & ECF_PURE)
    call_fusage
      = gen_rtx_EXPR_LIST
	(VOIDmode,
	 gen_rtx_USE (VOIDmode,
		      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
	 call_fusage);

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* If this is a const call, then set the insn's unchanging bit.  */
  if (ecf_flags & (ECF_CONST | ECF_PURE))
    CONST_OR_PURE_CALL_P (call_insn) = 1;

  /* If this call can't throw, attach a REG_EH_REGION reg note to that
     effect.  */
  if (ecf_flags & ECF_NOTHROW)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
					       REG_NOTES (call_insn));
  else
    note_eh_region_may_contain_throw ();

  if (ecf_flags & ECF_NORETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
					       REG_NOTES (call_insn));
  if (ecf_flags & ECF_ALWAYS_RETURN)
    REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
					       REG_NOTES (call_insn));

  if (ecf_flags & ECF_RETURNS_TWICE)
    {
      REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
						 REG_NOTES (call_insn));
      current_function_calls_setjmp = 1;
    }

  SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);

  /* Restore this now, so that we do defer pops for this call's args
     if the context of the call as a whole permits.  */
  inhibit_defer_pop = old_inhibit_defer_pop;

  if (n_popped > 0)
    {
      if (!already_popped)
	CALL_INSN_FUNCTION_USAGE (call_insn)
	  = gen_rtx_EXPR_LIST (VOIDmode,
			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
			       CALL_INSN_FUNCTION_USAGE (call_insn));
      rounded_stack_size -= n_popped;
      rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
      stack_pointer_delta -= n_popped;
    }
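
  /* N_POPPED is nonzero only for callee-pop conventions, i.e. when
     RETURN_POPS_ARGS is nonzero (for example, i386 `stdcall' functions
     pop their own arguments); the adjustment above keeps our record of
     the stack pointer consistent with what the callee actually did.  */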

  if (!ACCUMULATE_OUTGOING_ARGS)
    {
      /* If returning from the subroutine does not automatically pop the args,
	 we need an instruction to pop them sooner or later.
	 Perhaps do it now; perhaps just record how much space to pop later.

	 If returning from the subroutine does pop the args, indicate that the
	 stack pointer will be changed.  */

      if (rounded_stack_size != 0)
	{
	  if (ecf_flags & ECF_SP_DEPRESSED)
	    /* Just pretend we did the pop.  */
	    stack_pointer_delta -= rounded_stack_size;
	  else if (flag_defer_pop && inhibit_defer_pop == 0
		   && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
	    pending_stack_adjust += rounded_stack_size;
	  else
	    adjust_stack (rounded_stack_size_rtx);
	}
    }
  /* When we accumulate outgoing args, we must avoid any stack manipulations.
     Restore the stack pointer to its original value now.  Usually
     ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
     On the i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
     popping variants of functions exist as well.

     ??? We may optimize similar to defer_pop above, but it is
     probably not worthwhile.

     ??? It will be worthwhile to enable combine_stack_adjustments even for
     such machines.  */
  else if (n_popped)
    anti_adjust_stack (GEN_INT (n_popped));
}

/* Determine if the function identified by NAME and FNDECL is one with
   special properties we wish to know about.

   For example, if the function might return more than one time (setjmp), then
   set RETURNS_TWICE to a nonzero value.

   Similarly, set LONGJMP if the function is in the longjmp family.

   Set MAY_BE_ALLOCA for any memory allocation function that might allocate
   space from the stack, such as alloca.  */

static int
special_function_p (fndecl, flags)
     tree fndecl;
     int flags;
{
  if (! (flags & ECF_MALLOC)
      && fndecl && DECL_NAME (fndecl)
      && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
      /* Exclude functions not at the file scope, or not `extern',
	 since they are not the magic functions we would otherwise
	 think they are.  */
      && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
    {
      const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
	    && name[0] == 'a'
	    && ! strcmp (name, "alloca"))
	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
	       && name[0] == '_'
	       && ! strcmp (name, "__builtin_alloca"))))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _, __ or __x.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_' && name[2] == 'x')
	    tname += 3;
	  else if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      if (tname[0] == 's')
	{
	  if ((tname[1] == 'e'
	       && (! strcmp (tname, "setjmp")
		   || ! strcmp (tname, "setjmp_syscall")))
	      || (tname[1] == 'i'
		  && ! strcmp (tname, "sigsetjmp"))
	      || (tname[1] == 'a'
		  && ! strcmp (tname, "savectx")))
	    flags |= ECF_RETURNS_TWICE;

	  if (tname[1] == 'i'
	      && ! strcmp (tname, "siglongjmp"))
	    flags |= ECF_LONGJMP;
	}
      else if ((tname[0] == 'q' && tname[1] == 's'
		&& ! strcmp (tname, "qsetjmp"))
	       || (tname[0] == 'v' && tname[1] == 'f'
		   && ! strcmp (tname, "vfork")))
	flags |= ECF_RETURNS_TWICE;

      else if (tname[0] == 'l' && tname[1] == 'o'
	       && ! strcmp (tname, "longjmp"))
	flags |= ECF_LONGJMP;

      else if ((tname[0] == 'f' && tname[1] == 'o'
		&& ! strcmp (tname, "fork"))
	       /* Linux specific: __clone.  check NAME to insist on the
		  leading underscores, to avoid polluting the ISO / POSIX
		  namespace.  */
	       || (name[0] == '_' && name[1] == '_'
		   && ! strcmp (tname, "clone"))
	       || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
		   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
		   && (tname[5] == '\0'
		       || ((tname[5] == 'p' || tname[5] == 'e')
			   && tname[6] == '\0'))))
	flags |= ECF_FORK_OR_EXEC;
    }
  return flags;
}
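
/* For example, "vfork" and "sigsetjmp" yield ECF_RETURNS_TWICE, "longjmp"
   and "siglongjmp" yield ECF_LONGJMP, and "fork", "__clone" and the exec
   family yield ECF_FORK_OR_EXEC.  "alloca" must match by its full name,
   while the setjmp family also matches after a _, __ or __x prefix is
   stripped.  */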

/* Return nonzero when FNDECL represents a function in the setjmp family,
   i.e. one that may return more than once.  */

int
setjmp_call_p (fndecl)
     tree fndecl;
{
  return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
}

/* Return true when EXP contains an alloca call.  */

bool
alloca_call_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CALL_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	  == FUNCTION_DECL)
      && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
			      0) & ECF_MAY_BE_ALLOCA))
    return true;
  return false;
}

/* Detect flags (function attributes) from the function decl or type node.  */

int
flags_from_decl_or_type (exp)
     tree exp;
{
  int flags = 0;
  tree type = exp;

  if (DECL_P (exp))
    {
      struct cgraph_rtl_info *i = cgraph_rtl_info (exp);
      type = TREE_TYPE (exp);

      if (i)
	{
	  if (i->pure_function)
	    flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
	  if (i->const_function)
	    flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
	}

      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `pure' attribute.  */
      if (DECL_IS_PURE (exp))
	flags |= ECF_PURE | ECF_LIBCALL_BLOCK;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;
    }

  if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
    flags |= ECF_CONST | ECF_LIBCALL_BLOCK;

  if (TREE_THIS_VOLATILE (exp))
    flags |= ECF_NORETURN;

  /* Mark if the function returns with the stack pointer depressed.  We
     cannot consider it pure or constant in that case.  */
  if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
    {
      flags |= ECF_SP_DEPRESSED;
      flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
    }

  return flags;
}
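
/* For example, a function declared with __attribute__ ((pure)) has
   DECL_IS_PURE set, so it yields ECF_PURE | ECF_LIBCALL_BLOCK here, while
   a `const' function (TREE_READONLY and not volatile) yields
   ECF_CONST | ECF_LIBCALL_BLOCK.  */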

/* Precompute all register parameters as described by ARGS, storing values
   into fields within the ARGS array.

   NUM_ACTUALS indicates the total number of elements in the ARGS array.

   Set REG_PARM_SEEN if we encounter a register parameter.  */

static void
precompute_register_parameters (num_actuals, args, reg_parm_seen)
     int num_actuals;
     struct arg_data *args;
     int *reg_parm_seen;
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
					 VOIDmode, 0);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();

	    /* ANSI doesn't require a sequence point here,
	       but PCC has one, so this will avoid some problems.  */
	    emit_queue ();
	  }

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !LEGITIMATE_CONSTANT_P (args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameter registers.  */

	if ((! (GET_CODE (args[i].value) == REG
		|| (GET_CODE (args[i].value) == SUBREG
		    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
	    && args[i].mode != BLKmode
	    && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
	    && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
		|| preserve_subexpressions_p ()))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}

#ifdef REG_PARM_STACK_SPACE

/* The argument list is the property of the called routine and it
   may clobber it.  If the fixed area has been used for previous
   parameters, we must save and restore it.  */

static rtx
save_fixed_argument_area (reg_parm_stack_space, argblock,
			  low_to_save, high_to_save)
     int reg_parm_stack_space;
     rtx argblock;
     int *low_to_save;
     int *high_to_save;
{
  int low;
  int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
#ifdef ARGS_GROW_DOWNWARD
  high += 1;
#endif
  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0)
      {
	int num_to_save;
	enum machine_mode save_mode;
	int delta;
	rtx stack_area;
	rtx save_area;

	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;
	save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	if ((low & (MIN (GET_MODE_SIZE (save_mode),
			 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
	  save_mode = BLKmode;

#ifdef ARGS_GROW_DOWNWARD
	delta = -high;
#else
	delta = low;
#endif
	stack_area = gen_rtx_MEM (save_mode,
				  memory_address (save_mode,
						  plus_constant (argblock,
								 delta)));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    save_area = assign_stack_temp (BLKmode, num_to_save, 0);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}

static void
restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
     rtx save_area;
     rtx argblock;
     int high_to_save;
     int low_to_save;
{
  enum machine_mode save_mode = GET_MODE (save_area);
  int delta;
  rtx stack_area;

#ifdef ARGS_GROW_DOWNWARD
  delta = -high_to_save;
#else
  delta = low_to_save;
#endif
  stack_area = gen_rtx_MEM (save_mode,
			    memory_address (save_mode,
					    plus_constant (argblock, delta)));
  set_mem_align (stack_area, PARM_BOUNDARY);

  if (save_mode != BLKmode)
    emit_move_insn (stack_area, save_area);
  else
    emit_block_move (stack_area, validize_mem (save_area),
		     GEN_INT (high_to_save - low_to_save + 1),
		     BLOCK_OP_CALL_PARM);
}
#endif /* REG_PARM_STACK_SPACE */

/* If any elements in ARGS refer to parameters that are to be passed in
   registers, but not in memory, and whose alignment does not permit
   direct copying into registers, copy the values into a group of pseudos
   which we will later copy into the appropriate hard registers.

   Pseudos for each unaligned argument will be stored into the array
   args[argnum].aligned_regs.  The caller is responsible for deallocating
   the aligned_regs array if it is nonzero.  */

static void
store_unaligned_arguments_into_pseudos (args, num_actuals)
     struct arg_data *args;
     int num_actuals;
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& args[i].mode == BLKmode
	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int big_endian_correction = 0;

	args[i].n_aligned_regs
	  = args[i].partial ? args[i].partial
	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
						* args[i].n_aligned_regs);

	/* Structures smaller than a word are aligned to the least
	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (BYTES_BIG_ENDIAN
	    && bytes < UNITS_PER_WORD)
	  big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We used to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
			     extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
						word_mode, word_mode,
						BITS_PER_WORD),
			     BITS_PER_WORD);
	  }
      }
}
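
/* For example, a 3-byte structure on a 32-bit BYTES_BIG_ENDIAN target gets
   big_endian_correction = 32 - 24 = 8, so its 24 bits are stored at the
   least significant end of the word-sized pseudo, skipping the one empty
   high-order byte.  */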

/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   ACTPARMS.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   FNDECL is the tree code for the target of this call (if known).

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.  */

static void
initialize_argument_information (num_actuals, args, args_size, n_named_args,
				 actparms, fndecl, args_so_far,
				 reg_parm_stack_space, old_stack_level,
				 old_pending_adj, must_preallocate,
				 ecf_flags)
     int num_actuals ATTRIBUTE_UNUSED;
     struct arg_data *args;
     struct args_size *args_size;
     int n_named_args ATTRIBUTE_UNUSED;
     tree actparms;
     tree fndecl;
     CUMULATIVE_ARGS *args_so_far;
     int reg_parm_stack_space;
     rtx *old_stack_level;
     int *old_pending_adj;
     int *must_preallocate;
     int *ecf_flags;
{
  /* 1 if scanning parms front to back, -1 if scanning back to front.  */
  int inc;

  /* Count arg position in order args appear.  */
  int argpos;

  struct args_size alignment_pad;
  int i;
  tree p;

  args_size->constant = 0;
  args_size->var = 0;

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the front or from the back if necessary
     so that in any case the first arg to be pushed ends up at the front.  */

  if (PUSH_ARGS_REVERSED)
    {
      i = num_actuals - 1, inc = -1;

      /* In this case, must reverse order of args
	 so that we compute and push the last arg first.  */
    }
  else
    {
      i = 0, inc = 1;
    }

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
    {
      tree type = TREE_TYPE (TREE_VALUE (p));
      int unsignedp;
      enum machine_mode mode;

      args[i].tree_value = TREE_VALUE (p);

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union, pass things the way we would
	 pass the first field of the union.  We have already verified that
	 the modes are the same.  */
      if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
	type = TREE_TYPE (TYPE_FIELDS (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many words are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
	   && contains_placeholder_p (TYPE_SIZE (type)))
	  || TREE_ADDRESSABLE (type)
#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
					     type, argpos < n_named_args)
#endif
	  )
	{
	  /* If we're compiling a thunk, pass through invisible
	     references instead of making a copy.  */
	  if (current_function_is_thunk
#ifdef FUNCTION_ARG_CALLEE_COPIES
	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
					      type, argpos < n_named_args)
		  /* If it's in a register, we must make a copy of it too.  */
		  /* ??? Is this a sufficient test?  Is there a better one? */
		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
		       && REG_P (DECL_RTL (args[i].tree_value)))
		  && ! TREE_ADDRESSABLE (type))
#endif
	      )
	    {
	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
		 new object from the argument.  If we are passing by
		 invisible reference, the callee will do that for us, so we
		 can strip off the TARGET_EXPR.  This is not always safe,
		 but it is safe in the only case where this is a useful
		 optimization; namely, when the argument is a plain object.
		 In that case, the frontend is just asking the backend to
		 make a bitwise copy of the argument.  */

	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
	    {
	      /* In the V3 C++ ABI, parameters are destroyed in the caller.
		 We implement this by passing the address of the temporary
		 rather than expanding it into another allocated slot.  */
	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   args[i].tree_value);
	      type = build_pointer_type (type);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
						STACK_CHECK_MAX_VAR_SIZE))))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (TREE_VALUE (p));

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  copy = gen_rtx_MEM (BLKmode,
				      allocate_dynamic_stack_space
				      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 0, 1, 0);

	      store_expr (args[i].tree_value, copy, 0);
	      *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

	      args[i].tree_value = build1 (ADDR_EXPR,
					   build_pointer_type (type),
					   make_tree (type, copy));
	      type = build_pointer_type (type);
	    }
	}

      mode = TYPE_MODE (type);
      unsignedp = TREE_UNSIGNED (type);

#ifdef PROMOTE_FUNCTION_ARGS
      mode = promote_mode (type, mode, &unsignedp, 1);
#endif

      args[i].unsignedp = unsignedp;
      args[i].mode = mode;

      args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
				  argpos < n_named_args);
#ifdef FUNCTION_INCOMING_ARG
      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwound before calling the routine, so
	 arguments have to go into the incoming registers.  */
      args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
						     argpos < n_named_args);
#else
      args[i].tail_call_reg = args[i].reg;
#endif

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (args[i].reg)
	args[i].partial
	  = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
					argpos < n_named_args);
#endif

      args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
	 we cannot consider this function call constant.  */
      if (TREE_ADDRESSABLE (type))
	*ecf_flags &= ~ECF_LIBCALL_BLOCK;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	  || reg_parm_stack_space > 0
	  || args[i].pass_on_stack)
	locate_and_pad_parm (mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     fndecl, args_size, &args[i].offset,
			     &args[i].size, &alignment_pad);

#ifndef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;
#endif

      args[i].alignment_pad = alignment_pad;

      /* If a part of the arg was put into registers,
	 don't include that part in the amount pushed.  */
      if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
				  / (PARM_BOUNDARY / BITS_PER_UNIT)
				  * (PARM_BOUNDARY / BITS_PER_UNIT));

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].size.constant;
      if (args[i].size.var)
	{
	  ADD_PARM_SIZE (*args_size, args[i].size.var);
	}

      /* Since the slot offset points to the bottom of the slot,
	 we must record it after incrementing if the args grow down.  */
#ifdef ARGS_GROW_DOWNWARD
      args[i].slot_offset = *args_size;

      args[i].slot_offset.constant = -args_size->constant;
      if (args_size->var)
	SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
#endif

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
			    argpos < n_named_args);
    }
}

/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static int
compute_argument_block_size (reg_parm_stack_space, args_size,
			     preferred_stack_boundary)
     int reg_parm_stack_space;
     struct args_size *args_size;
     int preferred_stack_boundary ATTRIBUTE_UNUSED;
{
  int unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  if (stack_pointer_delta & (preferred_stack_boundary - 1))
	    abort ();
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

#ifndef OUTGOING_REG_PARM_STACK_SPACE
	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  args_size->var
	    = size_binop (MINUS_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));
#endif
	}
    }
  else
    {
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (((args_size->constant
			       + stack_pointer_delta
			       + preferred_stack_boundary - 1)
			      / preferred_stack_boundary
			      * preferred_stack_boundary)
			     - stack_pointer_delta);

      args_size->constant = MAX (args_size->constant,
				 reg_parm_stack_space);

#ifdef MAYBE_REG_PARM_STACK_SPACE
      if (reg_parm_stack_space == 0)
	args_size->constant = 0;
#endif

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      args_size->constant -= reg_parm_stack_space;
#endif
    }
  return unadjusted_args_size;
}
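
/* As a worked example of the rounding above: with args_size->constant == 20,
   stack_pointer_delta == 4 and a preferred boundary of 16 bytes, the
   constant becomes ((20 + 4 + 15) / 16) * 16 - 4 = 28, so that once the 28
   bytes are pushed the total stack adjustment is 32, a multiple of the
   boundary.  */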

/* Precompute parameters as needed for a function call.

   FLAGS is mask of ECF_* constants.

   NUM_ACTUALS is the number of arguments.

   ARGS is an array containing information for each argument; this
   routine fills in the INITIAL_VALUE and VALUE fields for each
   precomputed argument.  */

static void
precompute_arguments (flags, num_actuals, args)
     int flags;
     int num_actuals;
     struct arg_data *args;
{
  int i;

  /* If this function call is cse'able, precompute all the parameters.
     Note that if the parameter is constructed into a temporary, this will
     cause an additional copy because the parameter will be constructed
     into a temporary location and then copied into the outgoing arguments.
     If a parameter contains a call to alloca and this function uses the
     stack, precompute the parameter.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code)  */

  for (i = 0; i < num_actuals; i++)
    if ((flags & ECF_LIBCALL_BLOCK)
	|| calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
      {
	enum machine_mode mode;

	/* If this is an addressable type, we cannot pre-evaluate it.  */
	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
	  abort ();

	args[i].value
	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);

	/* ANSI doesn't require a sequence point here,
	   but PCC has one, so this will avoid some problems.  */
	emit_queue ();

	args[i].initial_value = args[i].value
	  = protect_from_queue (args[i].value, 0);

	mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
	if (mode != args[i].mode)
	  {
	    args[i].value
	      = convert_modes (args[i].mode, mode,
			       args[i].value, args[i].unsignedp);
#ifdef PROMOTE_FOR_CALL_ONLY
	    /* CSE will replace this only if it contains args[i].value
	       pseudo, so convert it down to the declared mode using
	       a SUBREG.  */
	    if (GET_CODE (args[i].value) == REG
		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
	      {
		args[i].initial_value
		  = gen_lowpart_SUBREG (mode, args[i].value);
		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
		SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
					      args[i].unsignedp);
	      }
#endif
	  }
      }
}

/* Given the current state of MUST_PREALLOCATE and information about
   arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
   compute and return the final value for MUST_PREALLOCATE.  */

static int
finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
     int must_preallocate;
     int num_actuals;
     struct arg_data *args;
     struct args_size *args_size;
{
  /* See if we have or want to preallocate stack space.

     If we would have to push a partially-in-regs parm
     before other stack parms, preallocate stack space instead.

     If the size of some parm is not a multiple of the required stack
     alignment, we must preallocate.

     If the total size of arguments that would otherwise create a copy in
     a temporary (such as a CALL) is more than half the total argument list
     size, preallocation is faster.

     Another reason to preallocate is if we have a machine (like the m88k)
     where stack alignment is required to be maintained between every
     pair of insns, not just when the call is made.  However, we assume here
     that such machines either do not have push insns (and hence preallocation
     would occur anyway) or the problem is taken care of with
     PUSH_ROUNDING.  */

  if (! must_preallocate)
    {
      int partial_seen = 0;
      int copy_to_evaluate_size = 0;
      int i;

      for (i = 0; i < num_actuals && ! must_preallocate; i++)
	{
	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
	    partial_seen = 1;
	  else if (partial_seen && args[i].reg == 0)
	    must_preallocate = 1;

	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
		  || TREE_CODE (args[i].tree_value) == COND_EXPR
		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
	    copy_to_evaluate_size
	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	}

      if (copy_to_evaluate_size * 2 >= args_size->constant
	  && args_size->constant > 0)
	must_preallocate = 1;
    }
  return must_preallocate;
}
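
/* For example, if the argument block is 32 bytes and one BLKmode argument
   is the 24-byte result of a CALL_EXPR, then copy_to_evaluate_size * 2
   == 48 >= 32, and the block is preallocated rather than building the
   value in a temporary and copying it into pushed arguments.  */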

/* If we preallocated stack space, compute the address of each argument
   and store it into the ARGS array.

   We need not ensure it is a valid memory address here; it will be
   validized when it is used.

   ARGBLOCK is an rtx for the address of the outgoing arguments.  */

static void
compute_argument_addresses (args, argblock, num_actuals)
     struct arg_data *args;
     rtx argblock;
     int num_actuals;
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i, arg_offset = 0;

      if (GET_CODE (argblock) == PLUS)
	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
	  rtx addr;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack && args[i].reg != 0)
	    continue;

	  if (GET_CODE (offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_align (args[i].stack, PARM_BOUNDARY);
	  set_mem_attributes (args[i].stack,
			      TREE_TYPE (args[i].tree_value), 1);

	  if (GET_CODE (slot_offset) == CONST_INT)
	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
	  else
	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);

	  addr = plus_constant (addr, arg_offset);
	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	  set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
	  set_mem_attributes (args[i].stack_slot,
			      TREE_TYPE (args[i].tree_value), 1);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
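
/* Note on the alias-set-0 trick above: alias set 0 is the universal set
   and conflicts with every other alias set, so no load or store can be
   reordered relative to these argument-slot references.  */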

/* Given a FNDECL and EXP, return an rtx suitable for use as a target address
   in a call instruction.

   FNDECL is the tree node for the target function.  For an indirect call
   FNDECL will be NULL_TREE.

   ADDR is the operand 0 of CALL_EXPR for this call.  */

static rtx
rtx_for_function_call (fndecl, addr)
     tree fndecl;
     tree addr;
{
  rtx funexp;

  /* Get the function to call, in the form of RTL.  */
  if (fndecl)
    {
      /* If this is the first use of the function, see if we need to
	 make an external definition for it.  */
      if (! TREE_USED (fndecl))
	{
	  assemble_external (fndecl);
	  TREE_USED (fndecl) = 1;
	}

      /* Get a SYMBOL_REF rtx for the function address.  */
      funexp = XEXP (DECL_RTL (fndecl), 0);
    }
  else
    /* Generate an rtx (probably a pseudo-register) for the address.  */
    {
      push_temp_slots ();
      funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
      emit_queue ();
    }
  return funexp;
}

/* Do the register loads required for any wholly-register parms or any
   parms which are passed both on the stack and in a register.  Their
   expressions were already evaluated.

   Mark all register-parms as living through the call, putting these USE
   insns in the CALL_INSN_FUNCTION_USAGE field.

   When IS_SIBCALL, perform the check_sibcall_argument_overlap
   checking, setting *SIBCALL_FAILURE if appropriate.  */

static void
load_register_parameters (args, num_actuals, call_fusage, flags,
			  is_sibcall, sibcall_failure)
     struct arg_data *args;
     int num_actuals;
     rtx *call_fusage;
     int flags;
     int is_sibcall;
     int *sibcall_failure;
{
  int i, j;

#ifdef LOAD_ARGS_REVERSED
  for (i = num_actuals - 1; i >= 0; i--)
#else
  for (i = 0; i < num_actuals; i++)
#endif
    {
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      int partial = args[i].partial;
      int nregs;

      if (reg)
	{
	  rtx before_arg = get_last_insn ();

	  /* Set to non-negative if must move a word at a time, even if just
	     one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
	     we just use a normal move insn.  This value can be zero if the
	     argument is a zero size structure with no fields.  */
	  nregs = (partial ? partial
		   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
		      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
			  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		      : -1));

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_load (reg, args[i].value,
			     int_size_in_bytes (TREE_TYPE (args[i].tree_value)));

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    emit_move_insn (reg, args[i].value);

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    move_block_to_reg (REGNO (reg),
			       validize_mem (args[i].value), nregs,
			       args[i].mode);

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
	    *sibcall_failure = 1;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg (call_fusage, reg);
	  else
	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
	}
    }
}
1791 /* Try to integrate the function.  See expand_inline_function for documentation
1792 about the parameters. */
1794 static rtx
1795 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1796 tree fndecl;
1797 tree actparms;
1798 rtx target;
1799 int ignore;
1800 tree type;
1801 rtx structure_value_addr;
1803 rtx temp;
1804 rtx before_call;
1805 int i;
1806 rtx old_stack_level = 0;
1807 int reg_parm_stack_space = 0;
1809 #ifdef REG_PARM_STACK_SPACE
1810 #ifdef MAYBE_REG_PARM_STACK_SPACE
1811 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1812 #else
1813 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1814 #endif
1815 #endif
1817 before_call = get_last_insn ();
1819 timevar_push (TV_INTEGRATION);
1821 temp = expand_inline_function (fndecl, actparms, target,
1822 ignore, type,
1823 structure_value_addr);
1825 timevar_pop (TV_INTEGRATION);
1827 /* If inlining succeeded, return. */
1828 if (temp != (rtx) (size_t) - 1)
1830 if (ACCUMULATE_OUTGOING_ARGS)
1832 /* If the outgoing argument list must be preserved, push
1833 the stack before executing the inlined function if it
1834 makes any calls. */
1836 i = reg_parm_stack_space;
1837 if (i > highest_outgoing_arg_in_use)
1838 i = highest_outgoing_arg_in_use;
1839 while (--i >= 0 && stack_usage_map[i] == 0)
1840 ;
1842 if (stack_arg_under_construction || i >= 0)
1844 rtx first_insn
1845 = before_call ? NEXT_INSN (before_call) : get_insns ();
1846 rtx insn = NULL_RTX, seq;
1848 /* Look for a call in the inline function code.
1849 If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1850 nonzero then there is a call and it is not necessary
1851 to scan the insns. */
1853 if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1854 for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1855 if (GET_CODE (insn) == CALL_INSN)
1856 break;
1858 if (insn)
1860 /* Reserve enough stack space so that the largest
1861 argument list of any function call in the inline
1862 function does not overlap the argument list being
1863 evaluated. This is usually an overestimate because
1864 allocate_dynamic_stack_space reserves space for an
1865 outgoing argument list in addition to the requested
1866 space, but there is no way to ask for stack space such
1867 that an argument list of a certain length can be
1868 safely constructed.
1870 Add the stack space reserved for register arguments, if
1871 any, in the inline function. What is really needed is the
1872 largest value of reg_parm_stack_space in the inline
1873 function, but that is not available. Using the current
1874 value of reg_parm_stack_space is wrong, but gives
1875 correct results on all supported machines. */
1877 int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1878 + reg_parm_stack_space);
1880 start_sequence ();
1881 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1882 allocate_dynamic_stack_space (GEN_INT (adjust),
1883 NULL_RTX, BITS_PER_UNIT);
1884 seq = get_insns ();
1885 end_sequence ();
1886 emit_insn_before (seq, first_insn);
1887 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1892 /* If the result is equivalent to TARGET, return TARGET to simplify
1893 checks in store_expr. They can be equivalent but not equal in the
1894 case of a function that returns BLKmode. */
1895 if (temp != target && rtx_equal_p (temp, target))
1896 return target;
1897 return temp;
1900 /* If inlining failed, mark FNDECL as needing to be compiled
1901 separately after all. If function was declared inline,
1902 give a warning. */
1903 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1904 && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1906 warning_with_decl (fndecl, "inlining failed in call to `%s'");
1907 warning ("called from here");
1909 (*lang_hooks.mark_addressable) (fndecl);
1910 return (rtx) (size_t) - 1;
1913 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1914 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1915 bytes, then we would need to push some additional bytes to pad the
1916 arguments. So, we compute an adjustment to the stack pointer for an
1917 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1918 bytes. Then, when the arguments are pushed the stack will be perfectly
1919 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1920 be popped after the call. Returns the adjustment. */
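/* Worked example (hypothetical numbers): with
   PREFERRED_UNIT_STACK_BOUNDARY == 16, stack_pointer_delta == 0,
   UNADJUSTED_ARGS_SIZE == 4 and PENDING_STACK_ADJUST == 18, the
   unadjusted alignment is (0 + 4) % 16 == 4, reduced to
   4 - (18 % 16) == 2 by the first step below.  Since that is
   positive, the returned adjustment is 18 - (16 - 2) == 4: popping
   4 bytes now and then pushing the 4 argument bytes leaves the
   stack 16-byte aligned, and ARGS_SIZE->CONSTANT becomes
   18 - 4 + 4 == 18, the number of bytes to pop after the call.  */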
1922 static int
1923 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1924 args_size,
1925 preferred_unit_stack_boundary)
1926 int unadjusted_args_size;
1927 struct args_size *args_size;
1928 int preferred_unit_stack_boundary;
1930 /* The number of bytes to pop so that the stack will be
1931 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1932 HOST_WIDE_INT adjustment;
1933 /* The alignment of the stack after the arguments are pushed, if we
1934 just pushed the arguments without adjusting the stack here. */
1935 HOST_WIDE_INT unadjusted_alignment;
1937 unadjusted_alignment
1938 = ((stack_pointer_delta + unadjusted_args_size)
1939 % preferred_unit_stack_boundary);
1941 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1942 as possible -- leaving just enough left to cancel out the
1943 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1944 PENDING_STACK_ADJUST is non-negative, and congruent to
1945 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1947 /* Begin by trying to pop all the bytes. */
1948 unadjusted_alignment
1949 = (unadjusted_alignment
1950 - (pending_stack_adjust % preferred_unit_stack_boundary));
1951 adjustment = pending_stack_adjust;
1952 /* Push enough additional bytes that the stack will be aligned
1953 after the arguments are pushed. */
1954 if (preferred_unit_stack_boundary > 1)
1956 if (unadjusted_alignment > 0)
1957 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1958 else
1959 adjustment += unadjusted_alignment;
1962 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1963 bytes after the call. The right number is the entire
1964 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1965 by the arguments in the first place. */
1966 args_size->constant
1967 = pending_stack_adjust - adjustment + unadjusted_args_size;
1969 return adjustment;
1972 /* Scan expression X to see whether it dereferences any argument
1973 slots we have already clobbered with tail call arguments (as noted
1974 in the stored_args_map bitmap).
1975 Return nonzero if X dereferences such an argument slot,
1976 zero otherwise. */
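/* Illustrative example (ignoring ARGS_GROW_DOWNWARD): for
   (mem:SI (plus (arg_pointer) (const_int 8))) this returns nonzero
   if any of bits 8 through 11 is set in stored_args_map.  */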
1978 static int
1979 check_sibcall_argument_overlap_1 (x)
1980 rtx x;
1982 RTX_CODE code;
1983 int i, j;
1984 unsigned int k;
1985 const char *fmt;
1987 if (x == NULL_RTX)
1988 return 0;
1990 code = GET_CODE (x);
1992 if (code == MEM)
1994 if (XEXP (x, 0) == current_function_internal_arg_pointer)
1995 i = 0;
1996 else if (GET_CODE (XEXP (x, 0)) == PLUS
1997 && XEXP (XEXP (x, 0), 0) ==
1998 current_function_internal_arg_pointer
1999 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2000 i = INTVAL (XEXP (XEXP (x, 0), 1));
2001 else
2002 return 0;
2004 #ifdef ARGS_GROW_DOWNWARD
2005 i = -i - GET_MODE_SIZE (GET_MODE (x));
2006 #endif
2008 for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
2009 if (i + k < stored_args_map->n_bits
2010 && TEST_BIT (stored_args_map, i + k))
2011 return 1;
2013 return 0;
2016 /* Scan all subexpressions. */
2017 fmt = GET_RTX_FORMAT (code);
2018 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2020 if (*fmt == 'e')
2022 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2023 return 1;
2025 else if (*fmt == 'E')
2027 for (j = 0; j < XVECLEN (x, i); j++)
2028 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2029 return 1;
2032 return 0;
2035 /* Scan the sequence after INSN to see whether it dereferences any
2036 argument slots already clobbered by tail call arguments (as noted
2037 in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP is
2038 nonzero, add the stack slots for ARG to the stored_args_map bitmap
2039 afterwards (when ARG is a register, MARK_STORED_ARGS_MAP should be
2040 0).  Return nonzero if the sequence dereferences such slots. */
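/* Illustrative example: after a 4-byte argument is stored at slot
   offset 8, bits 8 through 11 of stored_args_map are set, so a later
   argument computation that reads any of those bytes makes this
   function return nonzero and forces a sibcall failure.  */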
2042 static int
2043 check_sibcall_argument_overlap (insn, arg, mark_stored_args_map)
2044 rtx insn;
2045 struct arg_data *arg;
2046 int mark_stored_args_map;
2048 int low, high;
2050 if (insn == NULL_RTX)
2051 insn = get_insns ();
2052 else
2053 insn = NEXT_INSN (insn);
2055 for (; insn; insn = NEXT_INSN (insn))
2056 if (INSN_P (insn)
2057 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
2058 break;
2060 if (mark_stored_args_map)
2062 #ifdef ARGS_GROW_DOWNWARD
2063 low = -arg->slot_offset.constant - arg->size.constant;
2064 #else
2065 low = arg->slot_offset.constant;
2066 #endif
2068 for (high = low + arg->size.constant; low < high; low++)
2069 SET_BIT (stored_args_map, low);
2071 return insn != NULL_RTX;
2074 static tree
2075 fix_unsafe_tree (t)
2076 tree t;
2078 switch (unsafe_for_reeval (t))
2080 case 0: /* Safe. */
2081 break;
2083 case 1: /* Mildly unsafe. */
2084 t = unsave_expr (t);
2085 break;
2087 case 2: /* Wildly unsafe. */
2089 tree var = build_decl (VAR_DECL, NULL_TREE,
2090 TREE_TYPE (t));
2091 SET_DECL_RTL (var,
2092 expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
2093 t = var;
2095 break;
2097 default:
2098 abort ();
2100 return t;
2103 /* Generate all the code for a function call
2104 and return an rtx for its value.
2105 Store the value in TARGET (specified as an rtx) if convenient.
2106 If the value is stored in TARGET then TARGET is returned.
2107 If IGNORE is nonzero, then we ignore the value of the function call. */
2109 rtx
2110 expand_call (exp, target, ignore)
2111 tree exp;
2112 rtx target;
2113 int ignore;
2115 /* Nonzero if we are currently expanding a call. */
2116 static int currently_expanding_call = 0;
2118 /* List of actual parameters. */
2119 tree actparms = TREE_OPERAND (exp, 1);
2120 /* RTX for the function to be called. */
2121 rtx funexp;
2122 /* Sequence of insns to perform a tail recursive "call". */
2123 rtx tail_recursion_insns = NULL_RTX;
2124 /* Sequence of insns to perform a normal "call". */
2125 rtx normal_call_insns = NULL_RTX;
2126 /* Sequence of insns to perform a sibling "call" (tail call). */
2127 rtx tail_call_insns = NULL_RTX;
2128 /* Data type of the function. */
2129 tree funtype;
2130 /* Declaration of the function being called,
2131 or 0 if the function is computed (not known by name). */
2132 tree fndecl = 0;
2133 rtx insn;
2134 int try_tail_call = 1;
2135 int try_tail_recursion = 1;
2136 int pass;
2138 /* Register in which non-BLKmode value will be returned,
2139 or 0 if no value or if value is BLKmode. */
2140 rtx valreg;
2141 /* Address where we should return a BLKmode value;
2142 0 if value not BLKmode. */
2143 rtx structure_value_addr = 0;
2144 /* Nonzero if that address is being passed by treating it as
2145 an extra, implicit first parameter. Otherwise,
2146 it is passed by being copied directly into struct_value_rtx. */
2147 int structure_value_addr_parm = 0;
2148 /* Size of aggregate value wanted, or zero if none wanted
2149 or if we are using the non-reentrant PCC calling convention
2150 or expecting the value in registers. */
2151 HOST_WIDE_INT struct_value_size = 0;
2152 /* Nonzero if called function returns an aggregate in memory PCC style,
2153 by returning the address of where to find it. */
2154 int pcc_struct_value = 0;
2156 /* Number of actual parameters in this call, including struct value addr. */
2157 int num_actuals;
2158 /* Number of named args. Args after this are anonymous ones
2159 and they must all go on the stack. */
2160 int n_named_args;
2162 /* Vector of information about each argument.
2163 Arguments are numbered in the order they will be pushed,
2164 not the order they are written. */
2165 struct arg_data *args;
2167 /* Total size in bytes of all the stack-parms scanned so far. */
2168 struct args_size args_size;
2169 struct args_size adjusted_args_size;
2170 /* Size of arguments before any adjustments (such as rounding). */
2171 int unadjusted_args_size;
2172 /* Data on reg parms scanned so far. */
2173 CUMULATIVE_ARGS args_so_far;
2174 /* Nonzero if a reg parm has been scanned. */
2175 int reg_parm_seen;
2178 /* Nonzero if we must avoid push-insns in the args for this call.
2179 If stack space is allocated for register parameters, but not by the
2180 caller, then it is preallocated in the fixed part of the stack frame.
2181 So the entire argument block must then be preallocated (i.e., we
2182 ignore PUSH_ROUNDING in that case). */
2184 int must_preallocate = !PUSH_ARGS;
2186 /* Size of the stack reserved for parameter registers. */
2187 int reg_parm_stack_space = 0;
2189 /* Address of space preallocated for stack parms
2190 (on machines that lack push insns), or 0 if space not preallocated. */
2191 rtx argblock = 0;
2193 /* Mask of ECF_ flags. */
2194 int flags = 0;
2195 /* Nonzero if this is a call to an inline function. */
2196 int is_integrable = 0;
2197 #ifdef REG_PARM_STACK_SPACE
2198 /* Define the boundary of the register parm stack space that needs to be
2199 saved, if any. */
2200 int low_to_save, high_to_save;
2201 rtx save_area = 0; /* Place that it is saved */
2202 #endif
2204 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2205 char *initial_stack_usage_map = stack_usage_map;
2207 int old_stack_allocated;
2209 /* State variables to track stack modifications. */
2210 rtx old_stack_level = 0;
2211 int old_stack_arg_under_construction = 0;
2212 int old_pending_adj = 0;
2213 int old_inhibit_defer_pop = inhibit_defer_pop;
2215 /* Some stack pointer alterations we make are performed via
2216 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
2217 which we then also need to save/restore along the way. */
2218 int old_stack_pointer_delta = 0;
2220 rtx call_fusage;
2221 tree p = TREE_OPERAND (exp, 0);
2222 tree addr = TREE_OPERAND (exp, 0);
2223 int i;
2224 /* The alignment of the stack, in bits. */
2225 HOST_WIDE_INT preferred_stack_boundary;
2226 /* The alignment of the stack, in bytes. */
2227 HOST_WIDE_INT preferred_unit_stack_boundary;
2229 /* See if this is "nothrow" function call. */
2230 if (TREE_NOTHROW (exp))
2231 flags |= ECF_NOTHROW;
2233 /* See if we can find a DECL-node for the actual function.
2234 As a result, decide whether this is a call to an integrable function. */
2236 fndecl = get_callee_fndecl (exp);
2237 if (fndecl)
2239 if (!flag_no_inline
2240 && fndecl != current_function_decl
2241 && DECL_INLINE (fndecl)
2242 && DECL_SAVED_INSNS (fndecl)
2243 && DECL_SAVED_INSNS (fndecl)->inlinable)
2244 is_integrable = 1;
2245 else if (! TREE_ADDRESSABLE (fndecl))
2247 /* In case this function later becomes inlinable,
2248 record that there was already a non-inline call to it.
2250 Use abstraction instead of setting TREE_ADDRESSABLE
2251 directly. */
2252 if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2253 && optimize > 0)
2255 warning_with_decl (fndecl, "can't inline call to `%s'");
2256 warning ("called from here");
2258 (*lang_hooks.mark_addressable) (fndecl);
2261 flags |= flags_from_decl_or_type (fndecl);
2264 /* If we don't have a specific function to call, see if we have any
2265 attributes set in the type. */
2266 else
2267 flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2269 #ifdef REG_PARM_STACK_SPACE
2270 #ifdef MAYBE_REG_PARM_STACK_SPACE
2271 reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2272 #else
2273 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2274 #endif
2275 #endif
2277 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2278 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2279 must_preallocate = 1;
2280 #endif
2282 /* Warn if this value is an aggregate type,
2283 regardless of which calling convention we are using for it. */
2284 if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2285 warning ("function call has aggregate value");
2287 /* Set up a place to return a structure. */
2289 /* Cater to broken compilers. */
2290 if (aggregate_value_p (exp))
2292 /* This call returns a big structure. */
2293 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2295 #ifdef PCC_STATIC_STRUCT_RETURN
2297 pcc_struct_value = 1;
2298 /* Easier than making that case work right. */
2299 if (is_integrable)
2301 /* In case this is a static function, note that it has been
2302 used. */
2303 if (! TREE_ADDRESSABLE (fndecl))
2304 (*lang_hooks.mark_addressable) (fndecl);
2305 is_integrable = 0;
2308 #else /* not PCC_STATIC_STRUCT_RETURN */
2310 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2312 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (exp))
2314 /* The structure value address arg is already in actparms.
2315 Pull it out. It might be nice to just leave it there, but
2316 we need to set structure_value_addr. */
2317 tree return_arg = TREE_VALUE (actparms);
2318 actparms = TREE_CHAIN (actparms);
2319 structure_value_addr = expand_expr (return_arg, NULL_RTX,
2320 VOIDmode, EXPAND_NORMAL);
2322 else if (target && GET_CODE (target) == MEM)
2323 structure_value_addr = XEXP (target, 0);
2324 else
2326 /* For variable-sized objects, we must be called with a target
2327 specified. If we were to allocate space on the stack here,
2328 we would have no way of knowing when to free it. */
2329 rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
2331 mark_temp_addr_taken (d);
2332 structure_value_addr = XEXP (d, 0);
2333 target = 0;
2336 #endif /* not PCC_STATIC_STRUCT_RETURN */
2339 /* If called function is inline, try to integrate it. */
2341 if (is_integrable)
2343 rtx temp = try_to_integrate (fndecl, actparms, target,
2344 ignore, TREE_TYPE (exp),
2345 structure_value_addr);
2346 if (temp != (rtx) (size_t) - 1)
2347 return temp;
2350 /* Figure out the amount to which the stack should be aligned. */
2351 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2352 if (fndecl)
2354 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2355 if (i && i->preferred_incoming_stack_boundary)
2356 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2359 /* Operand 0 is a pointer-to-function; get the type of the function. */
2360 funtype = TREE_TYPE (addr);
2361 if (! POINTER_TYPE_P (funtype))
2362 abort ();
2363 funtype = TREE_TYPE (funtype);
2365 /* See if this is a call to a function that can return more than once
2366 or a call to longjmp or malloc. */
2367 flags |= special_function_p (fndecl, flags);
2369 if (flags & ECF_MAY_BE_ALLOCA)
2370 current_function_calls_alloca = 1;
2372 /* If struct_value_rtx is 0, it means pass the address
2373 as if it were an extra parameter. */
2374 if (structure_value_addr && struct_value_rtx == 0)
2376 /* If structure_value_addr is a REG other than
2377 virtual_outgoing_args_rtx, we can always use it. If it
2378 is not a REG, we must always copy it into a register.
2379 If it is virtual_outgoing_args_rtx, we must copy it to another
2380 register in some cases. */
2381 rtx temp = (GET_CODE (structure_value_addr) != REG
2382 || (ACCUMULATE_OUTGOING_ARGS
2383 && stack_arg_under_construction
2384 && structure_value_addr == virtual_outgoing_args_rtx)
2385 ? copy_addr_to_reg (structure_value_addr)
2386 : structure_value_addr);
2388 actparms
2389 = tree_cons (error_mark_node,
2390 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2391 temp),
2392 actparms);
2393 structure_value_addr_parm = 1;
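/* Illustrative example: on a target where struct_value_rtx is 0, a
   call to `struct S f (void)' returning in memory is in effect
   expanded as `f (&slot)', with the address of the return slot
   prepended to ACTPARMS above.  */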
2396 /* Count the arguments and set NUM_ACTUALS. */
2397 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2398 num_actuals++;
2400 /* Compute number of named args.
2401 Normally, don't include the last named arg if anonymous args follow.
2402 We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2403 (If no anonymous args follow, the result of list_length is actually
2404 one too large. This is harmless.)
2406 If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2407 zero, this machine will be able to place unnamed args that were
2408 passed in registers into the stack. So treat all args as named.
2409 This allows the insns emitted for a specific argument list to be
2410 independent of the function declaration.
2412 If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2413 reliable way to pass unnamed args in registers, so we must force
2414 them into memory. */
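/* Worked example: for `int f (int a, int b, ...)' TYPE_ARG_TYPES
   lists A and B, so with STRICT_ARGUMENT_NAMING zero the computation
   below gives 2 - 1 == 1 named arg; B is treated as unnamed because
   anonymous arguments follow.  For a non-varargs `int f (int a,
   int b)' the type list also carries the trailing void node, so
   3 - 1 == 2, as expected.  */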
2416 if ((STRICT_ARGUMENT_NAMING
2417 || ! PRETEND_OUTGOING_VARARGS_NAMED)
2418 && TYPE_ARG_TYPES (funtype) != 0)
2419 n_named_args
2420 = (list_length (TYPE_ARG_TYPES (funtype))
2421 /* Don't include the last named arg. */
2422 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2423 /* Count the struct value address, if it is passed as a parm. */
2424 + structure_value_addr_parm);
2425 else
2426 /* If we know nothing, treat all args as named. */
2427 n_named_args = num_actuals;
2429 /* Start updating where the next arg would go.
2431 On some machines (such as the PA) indirect calls have a different
2432 calling convention than normal calls. The last argument in
2433 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2434 or not. */
2435 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl);
2437 /* Make a vector to hold all the information about each arg. */
2438 args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2439 memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
2441 /* Build up entries in the ARGS array, compute the size of the
2442 arguments into ARGS_SIZE, etc. */
2443 initialize_argument_information (num_actuals, args, &args_size,
2444 n_named_args, actparms, fndecl,
2445 &args_so_far, reg_parm_stack_space,
2446 &old_stack_level, &old_pending_adj,
2447 &must_preallocate, &flags);
2449 if (args_size.var)
2451 /* If this function requires a variable-sized argument list, don't
2452 try to make a cse'able block for this call. We may be able to
2453 do this eventually, but it is too complicated to keep track of
2454 what insns go in the cse'able block and which don't. */
2456 flags &= ~ECF_LIBCALL_BLOCK;
2457 must_preallocate = 1;
2460 /* Now make final decision about preallocating stack space. */
2461 must_preallocate = finalize_must_preallocate (must_preallocate,
2462 num_actuals, args,
2463 &args_size);
2465 /* If the structure value address will reference the stack pointer, we
2466 must stabilize it. We don't need to do this if we know that we are
2467 not going to adjust the stack pointer in processing this call. */
2469 if (structure_value_addr
2470 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2471 || reg_mentioned_p (virtual_outgoing_args_rtx,
2472 structure_value_addr))
2473 && (args_size.var
2474 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2475 structure_value_addr = copy_to_reg (structure_value_addr);
2477 /* Tail calls can make things harder to debug, and we've traditionally
2478 pushed these optimizations into -O2. Don't try if we're already
2479 expanding a call, as that means we're an argument. Don't try if
2480 there are cleanups, as we know there's code to follow the call.
2482 If rtx_equal_function_value_matters is false, that means we've
2483 finished with regular parsing, which means that some of the
2484 machinery we use to generate tail-calls is no longer in place.
2485 This is most often true of sjlj-exceptions, which we couldn't
2486 tail-call to anyway. */
2488 if (currently_expanding_call++ != 0
2489 || !flag_optimize_sibling_calls
2490 || !rtx_equal_function_value_matters
2491 || any_pending_cleanups (1)
2492 || args_size.var)
2493 try_tail_call = try_tail_recursion = 0;
2495 /* Tail recursion fails when we are not dealing with recursive calls. */
2496 if (!try_tail_recursion
2497 || TREE_CODE (addr) != ADDR_EXPR
2498 || TREE_OPERAND (addr, 0) != current_function_decl)
2499 try_tail_recursion = 0;
2501 /* Other reasons for tail call optimization to fail. */
2502 if (
2503 #ifdef HAVE_sibcall_epilogue
2504 !HAVE_sibcall_epilogue
2505 #else
2506 1
2507 #endif
2508 || !try_tail_call
2509 /* Doing sibling call optimization needs some work, since
2510 structure_value_addr can be allocated on the stack.
2511 It does not seem worth the effort since few optimizable
2512 sibling calls will return a structure. */
2513 || structure_value_addr != NULL_RTX
2514 /* Check whether the target is able to optimize the call
2515 into a sibcall. */
2516 || !(*targetm.function_ok_for_sibcall) (fndecl, exp)
2517 /* Functions that do not return exactly once may not be sibcall
2518 optimized. */
2519 || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2520 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2521 /* If the called function is nested in the current one, it might access
2522 some of the caller's arguments, but could clobber them beforehand if
2523 the argument areas are shared. */
2524 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2525 /* If this function requires more stack slots than the current
2526 function, we cannot change it into a sibling call. */
2527 || args_size.constant > current_function_args_size
2528 /* If the callee pops its own arguments, then it must pop exactly
2529 the same number of arguments as the current function. */
2530 || RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2531 != RETURN_POPS_ARGS (current_function_decl,
2532 TREE_TYPE (current_function_decl),
2533 current_function_args_size))
2534 try_tail_call = 0;
2536 if (try_tail_call || try_tail_recursion)
2538 int end, inc;
2539 actparms = NULL_TREE;
2540 /* Ok, we're going to give the tail call the old college try.
2541 This means we're going to evaluate the function arguments
2542 up to three times. There are two degrees of badness we can
2543 encounter, those that can be unsaved and those that can't.
2544 (See unsafe_for_reeval commentary for details.)
2546 Generate a new argument list. Pass safe arguments through
2547 unchanged. For the easy badness wrap them in UNSAVE_EXPRs.
2548 For hard badness, evaluate them now and put their resulting
2549 rtx in a temporary VAR_DECL.
2551 initialize_argument_information has ordered the array for the
2552 order to be pushed, and we must remember this when reconstructing
2553 the original argument order. */
2555 if (PUSH_ARGS_REVERSED)
2557 inc = 1;
2558 i = 0;
2559 end = num_actuals;
2561 else
2563 inc = -1;
2564 i = num_actuals - 1;
2565 end = -1;
2568 for (; i != end; i += inc)
2570 args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2571 /* We need to build actparms for optimize_tail_recursion. We can
2572 safely trash away TREE_PURPOSE, since it is unused by this
2573 function. */
2574 if (try_tail_recursion)
2575 actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2577 /* Do the same for the function address if it is an expression. */
2578 if (!fndecl)
2579 addr = fix_unsafe_tree (addr);
2580 /* Expanding one of those dangerous arguments could have added
2581 cleanups, but otherwise give it a whirl. */
2582 if (any_pending_cleanups (1))
2583 try_tail_call = try_tail_recursion = 0;
2586 /* Generate a tail recursion sequence when calling ourselves. */
2588 if (try_tail_recursion)
2590 /* We want to emit any pending stack adjustments before the tail
2591 recursion "call". That way we know any adjustment after the tail
2592 recursion call can be ignored if we indeed use the tail recursion
2593 call expansion. */
2594 int save_pending_stack_adjust = pending_stack_adjust;
2595 int save_stack_pointer_delta = stack_pointer_delta;
2597 /* Emit any queued insns now; otherwise they would end up in
2598 only one of the alternates. */
2599 emit_queue ();
2601 /* Use a new sequence to hold any RTL we generate. We do not even
2602 know if we will use this RTL yet. The final decision can not be
2603 made until after RTL generation for the entire function is
2604 complete. */
2605 start_sequence ();
2606 /* If expanding any of the arguments creates cleanups, we can't
2607 do a tailcall. So, we'll need to pop the pending cleanups
2608 list. If, however, all goes well, and there are no cleanups
2609 then the call to expand_start_target_temps will have no
2610 effect. */
2611 expand_start_target_temps ();
2612 if (optimize_tail_recursion (actparms, get_last_insn ()))
2614 if (any_pending_cleanups (1))
2615 try_tail_call = try_tail_recursion = 0;
2616 else
2617 tail_recursion_insns = get_insns ();
2619 expand_end_target_temps ();
2620 end_sequence ();
2622 /* Restore the original pending stack adjustment for the sibling and
2623 normal call cases below. */
2624 pending_stack_adjust = save_pending_stack_adjust;
2625 stack_pointer_delta = save_stack_pointer_delta;
2628 if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2630 /* A fork duplicates the profile information, and an exec discards
2631 it. We can't rely on fork/exec to be paired. So write out the
2632 profile information we have gathered so far, and clear it. */
2633 /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2634 is subject to race conditions, just as with multithreaded
2635 programs. */
2637 emit_library_call (gcov_flush_libfunc, LCT_ALWAYS_RETURN, VOIDmode, 0);
2640 /* Ensure current function's preferred stack boundary is at least
2641 what we need. We don't have to increase alignment for recursive
2642 functions. */
2643 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2644 && fndecl != current_function_decl)
2645 cfun->preferred_stack_boundary = preferred_stack_boundary;
2646 if (fndecl == current_function_decl)
2647 cfun->recursive_call_emit = true;
2649 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2651 function_call_count++;
2653 /* We want to make two insn chains; one for a sibling call, the other
2654 for a normal call. We will select one of the two chains after
2655 initial RTL generation is complete. */
2656 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2658 int sibcall_failure = 0;
2659 /* We want to emit any pending stack adjustments before the tail
2660 recursion "call". That way we know any adjustment after the tail
2661 recursion call can be ignored if we indeed use the tail recursion
2662 call expansion. */
2663 int save_pending_stack_adjust = 0;
2664 int save_stack_pointer_delta = 0;
2665 rtx insns;
2666 rtx before_call, next_arg_reg;
2668 if (pass == 0)
2670 /* Emit any queued insns now; otherwise they would end up in
2671 only one of the alternates. */
2672 emit_queue ();
2674 /* State variables we need to save and restore between
2675 iterations. */
2676 save_pending_stack_adjust = pending_stack_adjust;
2677 save_stack_pointer_delta = stack_pointer_delta;
2679 if (pass)
2680 flags &= ~ECF_SIBCALL;
2681 else
2682 flags |= ECF_SIBCALL;
2684 /* Other state variables that we must reinitialize each time
2685 through the loop (that are not initialized by the loop itself). */
2686 argblock = 0;
2687 call_fusage = 0;
2689 /* Start a new sequence for the normal call case.
2691 From this point on, if the sibling call fails, we want to set
2692 sibcall_failure instead of continuing the loop. */
2693 start_sequence ();
2695 if (pass == 0)
2697 /* We know at this point that there are not currently any
2698 pending cleanups. If, however, in the process of evaluating
2699 the arguments we were to create some, we'll need to be
2700 able to get rid of them. */
2701 expand_start_target_temps ();
2704 /* Don't let pending stack adjusts add up to too much.
2705 Also, do all pending adjustments now if there is any chance
2706 this might be a call to alloca or if we are expanding a sibling
2707 call sequence or if we are calling a function that is to return
2708 with stack pointer depressed. */
2709 if (pending_stack_adjust >= 32
2710 || (pending_stack_adjust > 0
2711 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2712 || pass == 0)
2713 do_pending_stack_adjust ();
2715 /* When calling a const function, we must pop the stack args right away,
2716 so that the pop is deleted or moved with the call. */
2717 if (pass && (flags & ECF_LIBCALL_BLOCK))
2718 NO_DEFER_POP;
2720 #ifdef FINAL_REG_PARM_STACK_SPACE
2721 reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2722 args_size.var);
2723 #endif
2724 /* Precompute any arguments as needed. */
2725 if (pass)
2726 precompute_arguments (flags, num_actuals, args);
2728 /* Now we are about to start emitting insns that can be deleted
2729 if a libcall is deleted. */
2730 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2731 start_sequence ();
2733 adjusted_args_size = args_size;
2734 /* Compute the actual size of the argument block required. The variable
2735 and constant sizes must be combined, the size may have to be rounded,
2736 and there may be a minimum required size. When generating a sibcall
2737 pattern, do not round up, since we'll be re-using whatever space our
2738 caller provided. */
2739 unadjusted_args_size
2740 = compute_argument_block_size (reg_parm_stack_space,
2741 &adjusted_args_size,
2742 (pass == 0 ? 0
2743 : preferred_stack_boundary));
2745 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2747 /* The argument block when performing a sibling call is the
2748 incoming argument block. */
2749 if (pass == 0)
2751 argblock = virtual_incoming_args_rtx;
2752 argblock
2753 #ifdef STACK_GROWS_DOWNWARD
2754 = plus_constant (argblock, current_function_pretend_args_size);
2755 #else
2756 = plus_constant (argblock, -current_function_pretend_args_size);
2757 #endif
2758 stored_args_map = sbitmap_alloc (args_size.constant);
2759 sbitmap_zero (stored_args_map);
2762 /* If we have no actual push instructions, or shouldn't use them,
2763 make space for all args right now. */
2764 else if (adjusted_args_size.var != 0)
2766 if (old_stack_level == 0)
2768 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2769 old_stack_pointer_delta = stack_pointer_delta;
2770 old_pending_adj = pending_stack_adjust;
2771 pending_stack_adjust = 0;
2772 /* stack_arg_under_construction says whether a stack arg is
2773 being constructed at the old stack level. Pushing the stack
2774 gets a clean outgoing argument block. */
2775 old_stack_arg_under_construction = stack_arg_under_construction;
2776 stack_arg_under_construction = 0;
2778 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2780 else
2782 /* Note that we must go through the motions of allocating an argument
2783 block even if the size is zero because we may be storing args
2784 in the area reserved for register arguments, which may be part of
2785 the stack frame. */
2787 int needed = adjusted_args_size.constant;
2789 /* Store the maximum argument space used. It will be pushed by
2790 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2791 checking). */
2793 if (needed > current_function_outgoing_args_size)
2794 current_function_outgoing_args_size = needed;
2796 if (must_preallocate)
2798 if (ACCUMULATE_OUTGOING_ARGS)
2800 /* Since the stack pointer will never be pushed, it is
2801 possible for the evaluation of a parm to clobber
2802 something we have already written to the stack.
2803 Since most function calls on RISC machines do not use
2804 the stack, this is uncommon, but must work correctly.
2806 Therefore, we save any area of the stack that was already
2807 written and that we are using. Here we set up to do this
2808 by making a new stack usage map from the old one. The
2809 actual save will be done by store_one_arg.
2811 Another approach might be to try to reorder the argument
2812 evaluations to avoid this conflicting stack usage. */
2814 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2815 /* Since we will be writing into the entire argument area,
2816 the map must be allocated for its entire size, not just
2817 the part that is the responsibility of the caller. */
2818 needed += reg_parm_stack_space;
2819 #endif
2821 #ifdef ARGS_GROW_DOWNWARD
2822 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2823 needed + 1);
2824 #else
2825 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2826 needed);
2827 #endif
2828 stack_usage_map
2829 = (char *) alloca (highest_outgoing_arg_in_use);
2831 if (initial_highest_arg_in_use)
2832 memcpy (stack_usage_map, initial_stack_usage_map,
2833 initial_highest_arg_in_use);
2835 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2836 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2837 (highest_outgoing_arg_in_use
2838 - initial_highest_arg_in_use));
2839 needed = 0;
2841 /* The address of the outgoing argument list must not be
2842 copied to a register here, because argblock would be left
2843 pointing to the wrong place after the call to
2844 allocate_dynamic_stack_space below. */
2846 argblock = virtual_outgoing_args_rtx;
2848 else
2850 if (inhibit_defer_pop == 0)
2852 /* Try to reuse some or all of the pending_stack_adjust
2853 to get this space. */
2854 needed
2855 = (combine_pending_stack_adjustment_and_call
2856 (unadjusted_args_size,
2857 &adjusted_args_size,
2858 preferred_unit_stack_boundary));
2860 /* combine_pending_stack_adjustment_and_call computes
2861 an adjustment before the arguments are allocated.
2862 Account for them and see whether or not the stack
2863 needs to go up or down. */
2864 needed = unadjusted_args_size - needed;
2866 if (needed < 0)
2868 /* We're releasing stack space. */
2869 /* ??? We can avoid any adjustment at all if we're
2870 already aligned. FIXME. */
2871 pending_stack_adjust = -needed;
2872 do_pending_stack_adjust ();
2873 needed = 0;
2875 else
2876 /* We need to allocate space. We'll do that in
2877 push_block below. */
2878 pending_stack_adjust = 0;
2881 /* Special case this because overhead of `push_block' in
2882 this case is non-trivial. */
2883 if (needed == 0)
2884 argblock = virtual_outgoing_args_rtx;
2885 else
2886 argblock = push_block (GEN_INT (needed), 0, 0);
2888 /* We only really need to call `copy_to_reg' in the case
2889 where push insns are going to be used to pass ARGBLOCK
2890 to a function call in ARGS. In that case, the stack
2891 pointer changes value from the allocation point to the
2892 call point, and hence the value of
2893 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2894 as well always do it. */
2895 argblock = copy_to_reg (argblock);
2900 if (ACCUMULATE_OUTGOING_ARGS)
2902 /* The save/restore code in store_one_arg handles all
2903 cases except one: a constructor call (including a C
2904 function returning a BLKmode struct) to initialize
2905 an argument. */
2906 if (stack_arg_under_construction)
2908 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2909 rtx push_size = GEN_INT (reg_parm_stack_space
2910 + adjusted_args_size.constant);
2911 #else
2912 rtx push_size = GEN_INT (adjusted_args_size.constant);
2913 #endif
2914 if (old_stack_level == 0)
2916 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2917 NULL_RTX);
2918 old_stack_pointer_delta = stack_pointer_delta;
2919 old_pending_adj = pending_stack_adjust;
2920 pending_stack_adjust = 0;
2921 /* stack_arg_under_construction says whether a stack
2922 arg is being constructed at the old stack level.
2923 Pushing the stack gets a clean outgoing argument
2924 block. */
2925 old_stack_arg_under_construction
2926 = stack_arg_under_construction;
2927 stack_arg_under_construction = 0;
2928 /* Make a new map for the new argument list. */
2929 stack_usage_map = (char *)
2930 alloca (highest_outgoing_arg_in_use);
2931 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2932 highest_outgoing_arg_in_use = 0;
2934 allocate_dynamic_stack_space (push_size, NULL_RTX,
2935 BITS_PER_UNIT);
2938 /* If argument evaluation might modify the stack pointer,
2939 copy the address of the argument list to a register. */
2940 for (i = 0; i < num_actuals; i++)
2941 if (args[i].pass_on_stack)
2943 argblock = copy_addr_to_reg (argblock);
2944 break;
2948 compute_argument_addresses (args, argblock, num_actuals);
2950 /* If we push args individually in reverse order, perform stack alignment
2951 before the first push (the last arg). */
2952 if (PUSH_ARGS_REVERSED && argblock == 0
2953 && adjusted_args_size.constant != unadjusted_args_size)
2955 /* When the stack adjustment is pending, we get better code
2956 by combining the adjustments. */
2957 if (pending_stack_adjust
2958 && ! (flags & ECF_LIBCALL_BLOCK)
2959 && ! inhibit_defer_pop)
2961 pending_stack_adjust
2962 = (combine_pending_stack_adjustment_and_call
2963 (unadjusted_args_size,
2964 &adjusted_args_size,
2965 preferred_unit_stack_boundary));
2966 do_pending_stack_adjust ();
2968 else if (argblock == 0)
2969 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2970 - unadjusted_args_size));
2972 /* Now that the stack is properly aligned, pops can't safely
2973 be deferred during the evaluation of the arguments. */
2974 NO_DEFER_POP;
2976 funexp = rtx_for_function_call (fndecl, addr);
2978 /* Figure out the register where the value, if any, will come back. */
2979 valreg = 0;
2980 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2981 && ! structure_value_addr)
2983 if (pcc_struct_value)
2984 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2985 fndecl, (pass == 0));
2986 else
2987 valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2990 /* Precompute all register parameters. It isn't safe to compute anything
2991 once we have started filling any specific hard regs. */
2992 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2994 #ifdef REG_PARM_STACK_SPACE
2995 /* Save the fixed argument area if it's part of the caller's frame and
2996 is clobbered by argument setup for this call. */
2997 if (ACCUMULATE_OUTGOING_ARGS && pass)
2998 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2999 &low_to_save, &high_to_save);
3000 #endif
3002 /* Now store (and compute if necessary) all non-register parms.
3003 These come before register parms, since they can require block-moves,
3004 which could clobber the registers used for register parms.
3005 Parms which have partial registers are not stored here,
3006 but we do preallocate space here if they want that. */
3008 for (i = 0; i < num_actuals; i++)
3009 if (args[i].reg == 0 || args[i].pass_on_stack)
3011 rtx before_arg = get_last_insn ();
3013 if (store_one_arg (&args[i], argblock, flags,
3014 adjusted_args_size.var != 0,
3015 reg_parm_stack_space)
3016 || (pass == 0
3017 && check_sibcall_argument_overlap (before_arg,
3018 &args[i], 1)))
3019 sibcall_failure = 1;
3022 /* If we have a parm that is passed in registers but not in memory
3023 and whose alignment does not permit a direct copy into registers,
3024 make a group of pseudos that correspond to each register that we
3025 will later fill. */
3026 if (STRICT_ALIGNMENT)
3027 store_unaligned_arguments_into_pseudos (args, num_actuals);
3029 /* Now store any partially-in-registers parm.
3030 This is the last place a block-move can happen. */
3031 if (reg_parm_seen)
3032 for (i = 0; i < num_actuals; i++)
3033 if (args[i].partial != 0 && ! args[i].pass_on_stack)
3035 rtx before_arg = get_last_insn ();
3037 if (store_one_arg (&args[i], argblock, flags,
3038 adjusted_args_size.var != 0,
3039 reg_parm_stack_space)
3040 || (pass == 0
3041 && check_sibcall_argument_overlap (before_arg,
3042 &args[i], 1)))
3043 sibcall_failure = 1;
3046 /* If we pushed args in forward order, perform stack alignment
3047 after pushing the last arg. */
3048 if (!PUSH_ARGS_REVERSED && argblock == 0)
3049 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3050 - unadjusted_args_size));
3052 /* If register arguments require space on the stack and stack space
3053 was not preallocated, allocate stack space here for arguments
3054 passed in registers. */
3055 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3056 if (!ACCUMULATE_OUTGOING_ARGS
3057 && must_preallocate == 0 && reg_parm_stack_space > 0)
3058 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3059 #endif
3061 /* Pass the function the address in which to return a
3062 structure value. */
3063 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3065 emit_move_insn (struct_value_rtx,
3066 force_reg (Pmode,
3067 force_operand (structure_value_addr,
3068 NULL_RTX)));
3070 if (GET_CODE (struct_value_rtx) == REG)
3071 use_reg (&call_fusage, struct_value_rtx);
3074 funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3075 reg_parm_seen, pass == 0);
3077 load_register_parameters (args, num_actuals, &call_fusage, flags,
3078 pass == 0, &sibcall_failure);
3080 /* Perform postincrements before actually calling the function. */
3081 emit_queue ();
3083 /* Save a pointer to the last insn before the call, so that we can
3084 later safely search backwards to find the CALL_INSN. */
3085 before_call = get_last_insn ();
3087 /* Set up next argument register. For sibling calls on machines
3088 with register windows this should be the incoming register. */
3089 #ifdef FUNCTION_INCOMING_ARG
3090 if (pass == 0)
3091 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3092 void_type_node, 1);
3093 else
3094 #endif
3095 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3096 void_type_node, 1);
3098 /* All arguments and registers used for the call must be set up by
3099 now! */
3101 /* Stack must be properly aligned now. */
3102 if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3103 abort ();
3105 /* Generate the actual call instruction. */
3106 emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3107 adjusted_args_size.constant, struct_value_size,
3108 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3109 flags, & args_so_far);
3111 /* If call is cse'able, make appropriate pair of reg-notes around it.
3112 Test valreg so we don't crash; may safely ignore `const'
3113 if return type is void. Disable for PARALLEL return values, because
3114 we have no way to move such values into a pseudo register. */
3115 if (pass && (flags & ECF_LIBCALL_BLOCK))
3117 rtx insns;
3119 if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3121 insns = get_insns ();
3122 end_sequence ();
3123 emit_insn (insns);
3125 else
3127 rtx note = 0;
3128 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3130 /* Mark the return value as a pointer if needed. */
3131 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3132 mark_reg_pointer (temp,
3133 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3135 /* Construct an "equal form" for the value which mentions all the
3136 arguments in order as well as the function name. */
3137 for (i = 0; i < num_actuals; i++)
3138 note = gen_rtx_EXPR_LIST (VOIDmode,
3139 args[i].initial_value, note);
3140 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3142 insns = get_insns ();
3143 end_sequence ();
3145 if (flags & ECF_PURE)
3146 note = gen_rtx_EXPR_LIST (VOIDmode,
3147 gen_rtx_USE (VOIDmode,
3148 gen_rtx_MEM (BLKmode,
3149 gen_rtx_SCRATCH (VOIDmode))),
3150 note);
3152 emit_libcall_block (insns, temp, valreg, note);
3154 valreg = temp;
3157 else if (pass && (flags & ECF_MALLOC))
3159 rtx temp = gen_reg_rtx (GET_MODE (valreg));
3160 rtx last, insns;
3162 /* The return value from a malloc-like function is a pointer. */
3163 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3164 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3166 emit_move_insn (temp, valreg);
3168 /* The return value from a malloc-like function can not alias
3169 anything else. */
3170 last = get_last_insn ();
3171 REG_NOTES (last) =
3172 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3174 /* Write out the sequence. */
3175 insns = get_insns ();
3176 end_sequence ();
3177 emit_insn (insns);
3178 valreg = temp;
3181 /* For calls to `setjmp', etc., inform flow.c it should complain
3182 if nonvolatile values are live. For functions that cannot return,
3183 inform flow that control does not fall through. */
3185 if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3187 /* The barrier must be emitted
3188 immediately after the CALL_INSN. Some ports emit more
3189 than just a CALL_INSN above, so we must search for it here. */
3191 rtx last = get_last_insn ();
3192 while (GET_CODE (last) != CALL_INSN)
3194 last = PREV_INSN (last);
3195 /* There was no CALL_INSN? */
3196 if (last == before_call)
3197 abort ();
3200 emit_barrier_after (last);
3203 if (flags & ECF_LONGJMP)
3204 current_function_calls_longjmp = 1;
3206 /* If this function is returning into a memory location marked as
3207 readonly, it means it is initializing that location. But we normally
3208 treat functions as not clobbering such locations, so we need to
3209 specify that this one does. */
3210 if (target != 0 && GET_CODE (target) == MEM
3211 && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3212 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3214 /* If value type not void, return an rtx for the value. */
3216 /* If there are cleanups to be called, don't use a hard reg as target.
3217 We need to double check this and see if it matters anymore. */
3218 if (any_pending_cleanups (1))
3220 if (target && REG_P (target)
3221 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3222 target = 0;
3223 sibcall_failure = 1;
3226 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3227 || ignore)
3228 target = const0_rtx;
3229 else if (structure_value_addr)
3231 if (target == 0 || GET_CODE (target) != MEM)
3233 target
3234 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3235 memory_address (TYPE_MODE (TREE_TYPE (exp)),
3236 structure_value_addr));
3237 set_mem_attributes (target, exp, 1);
3240 else if (pcc_struct_value)
3242 /* This is the special C++ case where we need to
3243 know what the true target was. We take care to
3244 never use this value more than once in one expression. */
3245 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3246 copy_to_reg (valreg));
3247 set_mem_attributes (target, exp, 1);
3249 /* Handle calls that return values in multiple non-contiguous locations.
3250 The Irix 6 ABI has examples of this. */
3251 else if (GET_CODE (valreg) == PARALLEL)
3253 if (target == 0)
3255 /* This will only be assigned once, so it can be readonly. */
3256 tree nt = build_qualified_type (TREE_TYPE (exp),
3257 (TYPE_QUALS (TREE_TYPE (exp))
3258 | TYPE_QUAL_CONST));
3260 target = assign_temp (nt, 0, 1, 1);
3261 preserve_temp_slots (target);
3264 if (! rtx_equal_p (target, valreg))
3265 emit_group_store (target, valreg,
3266 int_size_in_bytes (TREE_TYPE (exp)));
3268 /* We can not support sibling calls for this case. */
3269 sibcall_failure = 1;
3271 else if (target
3272 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3273 && GET_MODE (target) == GET_MODE (valreg))
3275 /* TARGET and VALREG cannot be equal at this point because the
3276 latter would not have REG_FUNCTION_VALUE_P true, while the
3277 former would if it were referring to the same register.
3279 If they refer to the same register, this move will be a no-op,
3280 except when function inlining is being done. */
3281 emit_move_insn (target, valreg);
3283 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3285 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3287 /* We can not support sibling calls for this case. */
3288 sibcall_failure = 1;
3290 else
3291 target = copy_to_reg (valreg);
3293 #ifdef PROMOTE_FUNCTION_RETURN
3294 /* If we promoted this return value, make the proper SUBREG. TARGET
3295 might be const0_rtx here, so be careful. */
3296 if (GET_CODE (target) == REG
3297 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3298 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3300 tree type = TREE_TYPE (exp);
3301 int unsignedp = TREE_UNSIGNED (type);
3302 int offset = 0;
3304 /* If we don't promote as expected, something is wrong. */
3305 if (GET_MODE (target)
3306 != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3307 abort ();
3309 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3310 && GET_MODE_SIZE (GET_MODE (target))
3311 > GET_MODE_SIZE (TYPE_MODE (type)))
3313 offset = GET_MODE_SIZE (GET_MODE (target))
3314 - GET_MODE_SIZE (TYPE_MODE (type));
3315 if (! BYTES_BIG_ENDIAN)
3316 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3317 else if (! WORDS_BIG_ENDIAN)
3318 offset %= UNITS_PER_WORD;
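/* Worked example (hypothetical target): on a fully big-endian
   machine with 8-byte words, an SImode value promoted to a DImode
   register gives offset == 8 - 4 == 4; neither correction above
   applies, so the SUBREG created below selects the low-order four
   bytes of the register.  */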
3320 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3321 SUBREG_PROMOTED_VAR_P (target) = 1;
3322 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3324 #endif
3326 /* If size of args is variable or this was a constructor call for a stack
3327 argument, restore saved stack-pointer value. */
3329 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3331 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3332 stack_pointer_delta = old_stack_pointer_delta;
3333 pending_stack_adjust = old_pending_adj;
3334 stack_arg_under_construction = old_stack_arg_under_construction;
3335 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3336 stack_usage_map = initial_stack_usage_map;
3337 sibcall_failure = 1;
3339 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3341 #ifdef REG_PARM_STACK_SPACE
3342 if (save_area)
3343 restore_fixed_argument_area (save_area, argblock,
3344 high_to_save, low_to_save);
3345 #endif
3347 /* If we saved any argument areas, restore them. */
3348 for (i = 0; i < num_actuals; i++)
3349 if (args[i].save_area)
3351 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3352 rtx stack_area
3353 = gen_rtx_MEM (save_mode,
3354 memory_address (save_mode,
3355 XEXP (args[i].stack_slot, 0)));
3357 if (save_mode != BLKmode)
3358 emit_move_insn (stack_area, args[i].save_area);
3359 else
3360 emit_block_move (stack_area, args[i].save_area,
3361 GEN_INT (args[i].size.constant),
3362 BLOCK_OP_CALL_PARM);
3365 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3366 stack_usage_map = initial_stack_usage_map;
3369 /* If this was alloca, record the new stack level for nonlocal gotos.
3370 Check for the handler slots since we might not have a save area
3371 for non-local gotos. */
3373 if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3374 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3376 /* Free up storage we no longer need. */
3377 for (i = 0; i < num_actuals; ++i)
3378 if (args[i].aligned_regs)
3379 free (args[i].aligned_regs);
3381 if (pass == 0)
3383 /* Undo the fake expand_start_target_temps we did earlier. If
3384 there had been any cleanups created, we've already set
3385 sibcall_failure. */
3386 expand_end_target_temps ();
3389 insns = get_insns ();
3390 end_sequence ();
3392 if (pass == 0)
3394 tail_call_insns = insns;
3396 /* Restore the pending stack adjustment now that we have
3397 finished generating the sibling call sequence. */
3399 pending_stack_adjust = save_pending_stack_adjust;
3400 stack_pointer_delta = save_stack_pointer_delta;
3402 /* Prepare arg structure for next iteration. */
3403 for (i = 0; i < num_actuals; i++)
3405 args[i].value = 0;
3406 args[i].aligned_regs = 0;
3407 args[i].stack = 0;
3410 sbitmap_free (stored_args_map);
3412 else
3414 normal_call_insns = insns;
3416 /* Verify that we've deallocated all the stack we used. */
3417 if (old_stack_allocated !=
3418 stack_pointer_delta - pending_stack_adjust)
3419 abort ();
3422 /* If something prevents making this a sibling call,
3423 zero out the sequence. */
3424 if (sibcall_failure)
3425 tail_call_insns = NULL_RTX;
3428 /* The function optimize_sibling_and_tail_recursive_calls doesn't
3429 handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs. This
3430 can happen if the arguments to this function call an inline
3431 function whose expansion contains another CALL_PLACEHOLDER.
3433 If there are any C_Ps in any of these sequences, replace them
3434 with their normal call. */
3436 for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3437 if (GET_CODE (insn) == CALL_INSN
3438 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3439 replace_call_placeholder (insn, sibcall_use_normal);
3441 for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3442 if (GET_CODE (insn) == CALL_INSN
3443 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3444 replace_call_placeholder (insn, sibcall_use_normal);
3446 for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3447 if (GET_CODE (insn) == CALL_INSN
3448 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3449 replace_call_placeholder (insn, sibcall_use_normal);
3451 /* If this was a potential tail recursion site, then emit a
3452 CALL_PLACEHOLDER with the normal and the tail recursion streams.
3453 One of them will be selected later. */
3454 if (tail_recursion_insns || tail_call_insns)
3456 /* The tail recursion label must be kept around. We could expose
3457 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3458 and makes determining true tail recursion sites difficult.
3460 So we set LABEL_PRESERVE_P here, then clear it when we select
3461 one of the call sequences after rtl generation is complete. */
3462 if (tail_recursion_insns)
3463 LABEL_PRESERVE_P (tail_recursion_label) = 1;
3464 emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3465 tail_call_insns,
3466 tail_recursion_insns,
3467 tail_recursion_label));
3469 else
3470 emit_insn (normal_call_insns);
3472 currently_expanding_call--;
3474 /* If this function returns with the stack pointer depressed, ensure
3475 this block saves and restores the stack pointer, show it was
3476 changed, and adjust for any outgoing arg space. */
3477 if (flags & ECF_SP_DEPRESSED)
3479 clear_pending_stack_adjust ();
3480 emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3481 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3482 save_stack_pointer ();
3485 return target;
/* Output a library call to function FUN (a SYMBOL_REF rtx).
   The RETVAL parameter specifies whether the return value needs to be
   saved; the other parameters are documented in the emit_library_call
   function below.  */
static rtx
emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
     int retval;
     rtx orgfun;
     rtx value;
     enum libcall_type fn_type;
     enum machine_mode outmode;
     int nargs;
     va_list p;
{
  /* Total size in bytes of all the stack-parms scanned so far.  */
  struct args_size args_size;
  /* Size of arguments before any adjustments (such as rounding).  */
  struct args_size original_args_size;
  int argnum;
  rtx fun;
  int inc;
  int count;
  struct args_size alignment_pad;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg
  {
    rtx value;
    enum machine_mode mode;
    rtx reg;
    int partial;
    struct args_size offset;
    struct args_size size;
    rtx save_area;
  };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  rtx call_fusage = 0;
  rtx mem_value = 0;
  rtx valreg;
  int pcc_struct_value = 0;
  int struct_value_size = 0;
  int flags;
  int reg_parm_stack_space = 0;
  int needed;
  rtx before_call;
  tree tfom;                    /* type_for_mode (outmode, 0) */

#ifdef REG_PARM_STACK_SPACE
  /* Define the boundary of the register parm stack space that needs to be
     saved, if any.  */
  int low_to_save, high_to_save;
  rtx save_area = 0;            /* Place that it is saved.  */
#endif

  /* Size of the stack reserved for parameter registers.  */
  int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
  char *initial_stack_usage_map = stack_usage_map;

#ifdef REG_PARM_STACK_SPACE
#ifdef MAYBE_REG_PARM_STACK_SPACE
  reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
  reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
#endif
#endif

  /* By default, library functions cannot throw.  */
  flags = ECF_NOTHROW;

  switch (fn_type)
    {
    case LCT_NORMAL:
      break;
    case LCT_CONST:
      flags |= ECF_CONST;
      break;
    case LCT_PURE:
      flags |= ECF_PURE;
      break;
    case LCT_CONST_MAKE_BLOCK:
      flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
      break;
    case LCT_PURE_MAKE_BLOCK:
      flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
      break;
    case LCT_NORETURN:
      flags |= ECF_NORETURN;
      break;
    case LCT_THROW:
      flags = ECF_NORETURN;
      break;
    case LCT_ALWAYS_RETURN:
      flags = ECF_ALWAYS_RETURN;
      break;
    case LCT_RETURNS_TWICE:
      flags = ECF_RETURNS_TWICE;
      break;
    }
  fun = orgfun;

  /* Ensure current function's preferred stack boundary is at least
     what we need.  */
  if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
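
  /* E.g. (illustrative figures only), with a PREFERRED_STACK_BOUNDARY of
     128 bits this commits the current function to keeping the stack
     16-byte aligned, which the alignment check before emit_call_1 below
     depends on.  */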
  /* If this kind of value comes back in memory,
     decide where in memory it should come back.  */
  if (outmode != VOIDmode)
    {
      tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
      if (aggregate_value_p (tfom))
        {
#ifdef PCC_STATIC_STRUCT_RETURN
          rtx pointer_reg
            = hard_function_value (build_pointer_type (tfom), 0, 0);
          mem_value = gen_rtx_MEM (outmode, pointer_reg);
          pcc_struct_value = 1;
          if (value == 0)
            value = gen_reg_rtx (outmode);
#else /* not PCC_STATIC_STRUCT_RETURN */
          struct_value_size = GET_MODE_SIZE (outmode);
          if (value != 0 && GET_CODE (value) == MEM)
            mem_value = value;
          else
            mem_value = assign_temp (tfom, 0, 1, 1);
#endif
          /* This call returns a big structure.  */
          flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
        }
    }
  else
    tfom = void_type_node;

  /* ??? Unfinished: must pass the memory address as an argument.  */

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
  memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
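
  /* Note that NARGS + 1 slots are allocated: when the result comes back
     in memory and there is no struct_value_rtx, the address of the
     return slot is passed as one extra leading argument (see the
     nargs++ below).  */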
#ifdef INIT_CUMULATIVE_LIBCALL_ARGS
  INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
#else
  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
#endif

  args_size.constant = 0;
  args_size.var = 0;

  count = 0;

  /* Now we are about to start emitting insns that can be deleted
     if a libcall is deleted.  */
  if (flags & ECF_LIBCALL_BLOCK)
    start_sequence ();

  push_temp_slots ();

  /* If there's a structure value address to be passed,
     either pass it in the special place, or pass it as an extra argument.  */
  if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
    {
      rtx addr = XEXP (mem_value, 0);
      nargs++;

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
          && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
        addr = force_operand (addr, NULL_RTX);

      argvec[count].value = addr;
      argvec[count].mode = Pmode;
      argvec[count].partial = 0;

      argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
        abort ();
#endif

      locate_and_pad_parm (Pmode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           NULL_TREE, &args_size, &argvec[count].offset,
                           &argvec[count].size, &alignment_pad);

      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);

      count++;
    }
  for (; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */
      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
#endif

      /* There's no need to call protect_from_queue, because
         either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, NULL_RTX);

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
        {
          rtx slot;
          int must_copy = 1
#ifdef FUNCTION_ARG_CALLEE_COPIES
            && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
                                             NULL_TREE, 1)
#endif
            ;

          /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
             functions, so we have to pretend this isn't such a function.  */
          if (flags & ECF_LIBCALL_BLOCK)
            {
              rtx insns = get_insns ();
              end_sequence ();
              emit_insn (insns);
            }
          flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);

          /* If this was a CONST function, it is now PURE since
             it now reads memory.  */
          if (flags & ECF_CONST)
            {
              flags &= ~ECF_CONST;
              flags |= ECF_PURE;
            }

          if (GET_CODE (val) == MEM && ! must_copy)
            slot = val;
          else if (must_copy)
            {
              slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
                                  0, 1, 1);
              emit_move_insn (slot, val);
            }
          else
            {
              tree type = (*lang_hooks.types.type_for_mode) (mode, 0);

              slot
                = gen_rtx_MEM (mode,
                               expand_expr (build1 (ADDR_EXPR,
                                                    build_pointer_type (type),
                                                    make_tree (type, val)),
                                            NULL_RTX, VOIDmode, 0));
            }

          call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                           gen_rtx_USE (VOIDmode, slot),
                                           call_fusage);
          if (must_copy)
            call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode,
                                                              slot),
                                             call_fusage);

          mode = Pmode;
          val = force_operand (XEXP (slot, 0), NULL_RTX);
        }
#endif

      argvec[count].value = val;
      argvec[count].mode = mode;

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);

#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
                           1,
#else
                           argvec[count].reg != 0,
#endif
                           NULL_TREE, &args_size, &argvec[count].offset,
                           &argvec[count].size, &alignment_pad);

      if (argvec[count].size.var)
        abort ();

      if (reg_parm_stack_space == 0 && argvec[count].partial)
        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
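
      /* PARTIAL counts words of the argument that are passed in
         registers, so when no stack space is reserved for them those
         PARTIAL * UNITS_PER_WORD bytes are dropped from the stack size.
         E.g. (illustrative only), with 4-byte words an 8-byte argument
         with PARTIAL == 1 needs just 4 bytes of stack.  */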
      if (argvec[count].reg == 0 || argvec[count].partial != 0
          || reg_parm_stack_space > 0)
        args_size.constant += argvec[count].size.constant;

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
    }

#ifdef FINAL_REG_PARM_STACK_SPACE
  reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
                                                     args_size.var);
#endif
  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

  original_args_size = args_size;
  args_size.constant = (((args_size.constant
                          + stack_pointer_delta
                          + STACK_BYTES - 1)
                         / STACK_BYTES
                         * STACK_BYTES)
                        - stack_pointer_delta);
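
  /* A worked example with illustrative numbers: if STACK_BYTES is 16,
     stack_pointer_delta is 4 and args_size.constant is 30, this yields
     ((30 + 4 + 15) / 16) * 16 - 4 = 44, so that after the arguments are
     pushed the total adjustment 4 + 44 = 48 is again a multiple of 16.  */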
  args_size.constant = MAX (args_size.constant,
                            reg_parm_stack_space);

#ifndef OUTGOING_REG_PARM_STACK_SPACE
  args_size.constant -= reg_parm_stack_space;
#endif

  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;

  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* Since the stack pointer will never be pushed, it is possible for
         the evaluation of a parm to clobber something we have already
         written to the stack.  Since most function calls on RISC machines
         do not use the stack, this is uncommon, but must work correctly.

         Therefore, we save any area of the stack that was already written
         and that we are using.  Here we set up to do this by making a new
         stack usage map from the old one.

         Another approach might be to try to reorder the argument
         evaluations to avoid this conflicting stack usage.  */

      needed = args_size.constant;

#ifndef OUTGOING_REG_PARM_STACK_SPACE
      /* Since we will be writing into the entire argument area, the
         map must be allocated for its entire size, not just the part that
         is the responsibility of the caller.  */
      needed += reg_parm_stack_space;
#endif

#ifdef ARGS_GROW_DOWNWARD
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed + 1);
#else
      highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
                                         needed);
#endif
      stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);

      if (initial_highest_arg_in_use)
        memcpy (stack_usage_map, initial_stack_usage_map,
                initial_highest_arg_in_use);

      if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
        memset (&stack_usage_map[initial_highest_arg_in_use], 0,
                highest_outgoing_arg_in_use - initial_highest_arg_in_use);
      needed = 0;
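
      /* From this point on, stack_usage_map[I] is nonzero exactly when
         byte I of the outgoing argument area is already occupied, so the
         push loop below can tell which bytes it must save before
         overwriting them and restore after the call.  */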
      /* We must be careful to use virtual regs before they're instantiated,
         and real regs afterwards.  Loop optimization, for example, can create
         new libcalls after we've instantiated the virtual regs, and if we
         use virtuals anyway, they won't match the rtl patterns.  */

      if (virtuals_instantiated)
        argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
      else
        argblock = virtual_outgoing_args_rtx;
    }
  else
    {
      if (!PUSH_ARGS)
        argblock = push_block (GEN_INT (args_size.constant), 0, 0);
    }

  /* If we push args individually in reverse order, perform stack alignment
     before the first push (the last arg).  */
  if (argblock == 0 && PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    {
      inc = -1;
      argnum = nargs - 1;
    }
  else
    {
      inc = 1;
      argnum = 0;
    }

#ifdef REG_PARM_STACK_SPACE
  if (ACCUMULATE_OUTGOING_ARGS)
    {
      /* The argument list is the property of the called routine and it
         may clobber it.  If the fixed area has been used for previous
         parameters, we must save and restore it.  */
      save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
                                            &low_to_save, &high_to_save);
    }
#endif

  /* Push the args that need to be pushed.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      enum machine_mode mode = argvec[argnum].mode;
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;
      int lower_bound = 0, upper_bound = 0, i;

      if (! (reg != 0 && partial == 0))
        {
          if (ACCUMULATE_OUTGOING_ARGS)
            {
              /* If this is being stored into a pre-allocated, fixed-size,
                 stack area, save any previous data at that location.  */

#ifdef ARGS_GROW_DOWNWARD
              /* stack_slot is negative, but we want to index stack_usage_map
                 with positive values.  */
              upper_bound = -argvec[argnum].offset.constant + 1;
              lower_bound = upper_bound - argvec[argnum].size.constant;
#else
              lower_bound = argvec[argnum].offset.constant;
              upper_bound = lower_bound + argvec[argnum].size.constant;
#endif

              i = lower_bound;
              /* Don't worry about things in the fixed argument area;
                 it has already been saved.  */
              if (i < reg_parm_stack_space)
                i = reg_parm_stack_space;
              while (i < upper_bound && stack_usage_map[i] == 0)
                i++;

              if (i < upper_bound)
                {
                  /* We need to make a save area.  See what mode we can make
                     it.  */
                  enum machine_mode save_mode
                    = mode_for_size (argvec[argnum].size.constant
                                     * BITS_PER_UNIT,
                                     MODE_INT, 1);
                  rtx stack_area
                    = gen_rtx_MEM
                      (save_mode,
                       memory_address
                       (save_mode,
                        plus_constant (argblock,
                                       argvec[argnum].offset.constant)));
                  argvec[argnum].save_area = gen_reg_rtx (save_mode);

                  emit_move_insn (argvec[argnum].save_area, stack_area);
                }
            }

          emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
                          partial, reg, 0, argblock,
                          GEN_INT (argvec[argnum].offset.constant),
                          reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));

          /* Now mark the segment we just used.  */
          if (ACCUMULATE_OUTGOING_ARGS)
            for (i = lower_bound; i < upper_bound; i++)
              stack_usage_map[i] = 1;

          NO_DEFER_POP;
        }
    }

  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0 && !PUSH_ARGS_REVERSED)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));

  if (PUSH_ARGS_REVERSED)
    argnum = nargs - 1;
  else
    argnum = 0;

  fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);

  /* Now load any reg parms into their regs.  */

  /* ARGNUM indexes the ARGVEC array in the order in which the arguments
     are to be pushed.  */
  for (count = 0; count < nargs; count++, argnum += inc)
    {
      rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      /* Handle calls that pass values in multiple non-contiguous
         locations.  The PA64 has examples of this for library calls.  */
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
      else if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);

      NO_DEFER_POP;
    }

  /* Any regs containing parms remain in use through the call.  */
  for (count = 0; count < nargs; count++)
    {
      rtx reg = argvec[count].reg;
      if (reg != 0 && GET_CODE (reg) == PARALLEL)
        use_group_regs (&call_fusage, reg);
      else if (reg != 0)
        use_reg (&call_fusage, reg);
    }

  /* Pass the function the address in which to return a structure value.  */
  if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
    {
      emit_move_insn (struct_value_rtx,
                      force_reg (Pmode,
                                 force_operand (XEXP (mem_value, 0),
                                                NULL_RTX)));
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
    }

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;
  valreg = (mem_value == 0 && outmode != VOIDmode
            ? hard_libcall_value (outmode) : NULL_RTX);

  /* Stack must be properly aligned now.  */
  if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
    abort ();
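
  /* E.g. (illustrative only), a 128-bit PREFERRED_STACK_BOUNDARY makes
     the mask 15, so any stack_pointer_delta that is not a multiple of
     16 bytes aborts here instead of emitting a misaligned call.  */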
  before_call = get_last_insn ();

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */
  /* The return type is needed to decide how many bytes the function pops.
     Signedness plays no role in that, so for simplicity, we pretend it's
     always signed.  We also assume that the list of arguments passed has
     no impact, so we pretend it is unknown.  */

  emit_call_1 (fun,
               get_identifier (XSTR (orgfun, 0)),
               build_function_type (tfom, NULL_TREE),
               original_args_size.constant, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg,
               old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);

  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  For functions that cannot return,
     inform flow that control does not fall through.  */

  if (flags & (ECF_NORETURN | ECF_LONGJMP))
    {
      /* The barrier note must be emitted
         immediately after the CALL_INSN.  Some ports emit more than
         just a CALL_INSN above, so we must search for it here.  */

      rtx last = get_last_insn ();
      while (GET_CODE (last) != CALL_INSN)
        last = PREV_INSN (last);

      /* There was no CALL_INSN?  */
      if (last == before_call)
        abort ();

      emit_barrier_after (last);
    }

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;

  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  Disable for PARALLEL return values, because
     we have no way to move such values into a pseudo register.  */
  if (flags & ECF_LIBCALL_BLOCK)
    {
      rtx insns;

      if (valreg == 0)
        {
          insns = get_insns ();
          end_sequence ();
          emit_insn (insns);
        }
      else
        {
          rtx note = 0;
          rtx temp;
          int i;

          if (GET_CODE (valreg) == PARALLEL)
            {
              temp = gen_reg_rtx (outmode);
              emit_group_store (temp, valreg, outmode);
              valreg = temp;
            }

          temp = gen_reg_rtx (GET_MODE (valreg));

          /* Construct an "equal form" for the value which mentions all the
             arguments in order as well as the function name.  */
          for (i = 0; i < nargs; i++)
            note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
          note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);

          insns = get_insns ();
          end_sequence ();

          if (flags & ECF_PURE)
            note = gen_rtx_EXPR_LIST (VOIDmode,
                     gen_rtx_USE (VOIDmode,
                                  gen_rtx_MEM (BLKmode,
                                               gen_rtx_SCRATCH (VOIDmode))),
                     note);

          emit_libcall_block (insns, temp, valreg, note);

          valreg = temp;
        }
    }
  pop_temp_slots ();
  /* Copy the value to the right place.  */
  if (outmode != VOIDmode && retval)
    {
      if (mem_value)
        {
          if (value == 0)
            value = mem_value;
          if (value != mem_value)
            emit_move_insn (value, mem_value);
        }
      else if (GET_CODE (valreg) == PARALLEL)
        {
          if (value == 0)
            value = gen_reg_rtx (outmode);
          emit_group_store (value, valreg, outmode);
        }
      else if (value != 0)
        emit_move_insn (value, valreg);
      else
        value = valreg;
    }

  if (ACCUMULATE_OUTGOING_ARGS)
    {
#ifdef REG_PARM_STACK_SPACE
      if (save_area)
        restore_fixed_argument_area (save_area, argblock,
                                     high_to_save, low_to_save);
#endif

      /* If we saved any argument areas, restore them.  */
      for (count = 0; count < nargs; count++)
        if (argvec[count].save_area)
          {
            enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
            rtx stack_area
              = gen_rtx_MEM (save_mode,
                             memory_address
                             (save_mode,
                              plus_constant (argblock,
                                             argvec[count].offset.constant)));

            emit_move_insn (stack_area, argvec[count].save_area);
          }

      highest_outgoing_arg_in_use = initial_highest_arg_in_use;
      stack_usage_map = initial_stack_usage_map;
    }

  return value;
}
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue already.

   FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
   calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
   which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
   LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
   REG_LIBCALL/REG_RETVAL notes with an extra (use (mem (scratch))),
   or another LCT_ value for other types of library calls.  */
void
emit_library_call VPARAMS ((rtx orgfun, enum libcall_type fn_type,
                            enum machine_mode outmode, int nargs, ...))
{
  VA_OPEN (p, nargs);
  VA_FIXEDARG (p, rtx, orgfun);
  VA_FIXEDARG (p, int, fn_type);
  VA_FIXEDARG (p, enum machine_mode, outmode);
  VA_FIXEDARG (p, int, nargs);

  emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);

  VA_CLOSE (p);
}
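
/* A minimal sketch of a typical use, with illustrative operands only
   (OBJECT and SIZE stand for rtxen built by the caller):

     emit_library_call (memset_libfunc, LCT_NORMAL, VOIDmode, 3,
                        XEXP (object, 0), Pmode,
                        const0_rtx, TYPE_MODE (integer_type_node),
                        size, TYPE_MODE (sizetype));

   i.e. the fixed arguments followed by NARGS (value, mode) pairs.  */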
/* Like emit_library_call except that an extra argument, VALUE,
   comes second and says where to store the result.
   (If VALUE is zero, this function chooses a convenient way
   to return the value.)

   This function returns an rtx for where the value is to be found.
   If VALUE is nonzero, VALUE is returned.  */

rtx
emit_library_call_value VPARAMS ((rtx orgfun, rtx value,
                                  enum libcall_type fn_type,
                                  enum machine_mode outmode, int nargs, ...))
{
  rtx result;

  VA_OPEN (p, nargs);
  VA_FIXEDARG (p, rtx, orgfun);
  VA_FIXEDARG (p, rtx, value);
  VA_FIXEDARG (p, int, fn_type);
  VA_FIXEDARG (p, enum machine_mode, outmode);
  VA_FIXEDARG (p, int, nargs);

  result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
                                      nargs, p);

  VA_CLOSE (p);

  return result;
}
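
/* A minimal sketch of a typical use, with illustrative operands only
   (LIBFUNC, OP0 and OP1 are placeholders):

     quotient = emit_library_call_value (libfunc, NULL_RTX, LCT_CONST,
                                         SImode, 2,
                                         op0, SImode, op1, SImode);

   Passing NULL_RTX for VALUE lets the function pick where the result
   lives; the returned rtx is then the place to read it from.  */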
/* Store a single argument for a function call
   into the register or memory area where it must be passed.
   *ARG describes the argument value and where to pass it.

   ARGBLOCK is the address of the stack-block for all the arguments,
   or 0 on a machine where arguments are pushed individually.

   FLAGS is the set of ECF_* flags for the call; ECF_MAY_BE_ALLOCA
   means this could be a call to `alloca', so we must be careful about
   how the stack is used.

   VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
   argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is set to
   indicate that we need not worry about saving and restoring the stack.

   REG_PARM_STACK_SPACE is the size of the fixed argument area reserved
   for arguments that are also passed in registers.

   Return nonzero if this arg should cause sibcall failure,
   zero otherwise.  */
static int
store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
     struct arg_data *arg;
     rtx argblock;
     int flags;
     int variable_size ATTRIBUTE_UNUSED;
     int reg_parm_stack_space;
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  int used = 0;
  int i, lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
         save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
        {
#ifdef ARGS_GROW_DOWNWARD
          /* stack_slot is negative, but we want to index stack_usage_map
             with positive values.  */
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
          else
            upper_bound = 0;

          lower_bound = upper_bound - arg->size.constant;
#else
          if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
            lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
          else
            lower_bound = 0;

          upper_bound = lower_bound + arg->size.constant;
#endif

          i = lower_bound;
          /* Don't worry about things in the fixed argument area;
             it has already been saved.  */
          if (i < reg_parm_stack_space)
            i = reg_parm_stack_space;
          while (i < upper_bound && stack_usage_map[i] == 0)
            i++;

          if (i < upper_bound)
            {
              /* We need to make a save area.  See what mode we can make it.  */
              enum machine_mode save_mode
                = mode_for_size (arg->size.constant * BITS_PER_UNIT,
                                 MODE_INT, 1);
              rtx stack_area
                = gen_rtx_MEM (save_mode,
                               memory_address (save_mode,
                                               XEXP (arg->stack_slot, 0)));

              if (save_mode == BLKmode)
                {
                  tree ot = TREE_TYPE (arg->tree_value);
                  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
                                                       | TYPE_QUAL_CONST));

                  arg->save_area = assign_temp (nt, 0, 1, 1);
                  preserve_temp_slots (arg->save_area);
                  emit_block_move (validize_mem (arg->save_area), stack_area,
                                   expr_size (arg->tree_value),
                                   BLOCK_OP_CALL_PARM);
                }
              else
                {
                  arg->save_area = gen_reg_rtx (save_mode);
                  emit_move_insn (arg->save_area, stack_area);
                }
            }
        }
    }
  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
        reg = arg->tail_call_reg;
      else
        reg = arg->reg;
      partial = arg->partial;
    }

  if (reg != 0 && partial == 0)
    /* Being passed entirely in a register.  We shouldn't be called in
       this case.  */
    abort ();

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
         being evaluated directly into the outgoing argument list and
         expand_call must take special action to preserve the argument list
         if it is called recursively.

         For scalar function arguments stack_usage_map is sufficient to
         determine which stack slots must be saved and restored.  Scalar
         arguments in general have pass_on_stack == 0.

         If this argument is initialized by a function which takes the
         address of the argument (a C++ constructor or a C function
         returning a BLKmode structure), then stack_usage_map is
         insufficient and expand_call must push the stack around the
         function call.  Such arguments have pass_on_stack == 1.

         Note that it is always safe to set stack_arg_under_construction,
         but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
        stack_arg_under_construction++;

      arg->value = expand_expr (pval,
                                (partial
                                 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
                                ? NULL_RTX : arg->stack,
                                VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting the object, or if for any other reason the
         mode doesn't agree, convert it now.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
        arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
                                    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
        stack_arg_under_construction--;
    }

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      int size;

      /* Argument is a scalar, not entirely passed in registers.
         (If part is passed in registers, arg->partial says how much
         and emit_push_insn will take care of putting it there.)

         Push it, and if its size is less than the
         amount of space allocated to it,
         also bump stack pointer by the additional space.
         Note that in C the default argument promotions
         will prevent such mismatches.  */

      size = GET_MODE_SIZE (arg->mode);
      /* Compute how much space the push instruction will push.
         On many machines, pushing a byte will advance the stack
         pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
         round up to a multiple of the alignment for arguments.  */
      if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
        used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
                 / (PARM_BOUNDARY / BITS_PER_UNIT))
                * (PARM_BOUNDARY / BITS_PER_UNIT));
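
      /* E.g. (illustrative only), a 1-byte argument with a 32-bit
         PARM_BOUNDARY and no PUSH_ROUNDING adjustment gives used == 4;
         the difference used - size == 3 is handed to emit_push_insn
         below so the stack pointer also moves past the padding.  */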
      /* This isn't already where we want it on the stack, so put it there.
         This can either be done with push or copy insns.  */
      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
                      PARM_BOUNDARY, partial, reg, used - size, argblock,
                      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.  */
      if (partial == 0)
        arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      int excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
         If part is passed in registers, PARTIAL says how much
         and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
         of the allocation unit for arguments.  */

      if (arg->size.var != 0)
        {
          excess = 0;
          size_rtx = ARGS_SIZE_RTX (arg->size);
        }
      else
        {
          /* PUSH_ROUNDING has no effect on us, because
             emit_push_insn for BLKmode is careful to avoid it.  */
          excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
                    + partial * UNITS_PER_WORD);
          size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
                                  NULL_RTX, TYPE_MODE (sizetype), 0);
        }

      /* Some types will require stricter alignment, which will be
         provided for elsewhere in argument layout.  */
      parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));

      /* When an argument is padded down, the block is aligned to
         PARM_BOUNDARY, but the actual argument isn't.  */
      if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
        {
          if (arg->size.var)
            parm_align = BITS_PER_UNIT;
          else if (excess)
            {
              unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
              parm_align = MIN (parm_align, excess_align);
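
              /* EXCESS & -EXCESS isolates the least significant set bit
                 of EXCESS, so EXCESS_ALIGN is the largest power-of-two
                 byte count (expressed in bits) guaranteed to divide the
                 padding, and PARM_ALIGN is capped accordingly.  */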
            }
        }

      if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
        {
          /* emit_push_insn might not work properly if arg->value and
             argblock + arg->offset areas overlap.  */
          rtx x = arg->value;
          int i = 0;

          if (XEXP (x, 0) == current_function_internal_arg_pointer
              || (GET_CODE (XEXP (x, 0)) == PLUS
                  && XEXP (XEXP (x, 0), 0) ==
                     current_function_internal_arg_pointer
                  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
            {
              if (XEXP (x, 0) != current_function_internal_arg_pointer)
                i = INTVAL (XEXP (XEXP (x, 0), 1));

              /* expand_call should ensure this.  */
              if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
                abort ();

              if (arg->offset.constant > i)
                {
                  if (arg->offset.constant < i + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
              else if (arg->offset.constant < i)
                {
                  if (i < arg->offset.constant + INTVAL (size_rtx))
                    sibcall_failure = 1;
                }
            }
        }

      emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
                      parm_align, partial, reg, excess, argblock,
                      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
                      ARGS_SIZE_RTX (arg->alignment_pad));

      /* Unless this is a partially-in-register argument, the argument is now
         in the stack.

         ??? Unlike the case above, in which we want the actual
         address of the data, so that we can load it directly into a
         register, here we want the address of the stack slot, so that
         it's properly aligned for word-by-word copying or something
         like that.  It's not clear that this is always correct.  */
      if (partial == 0)
        arg->value = arg->stack_slot;
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    for (i = lower_bound; i < upper_bound; i++)
      stack_usage_map[i] = 1;

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* ANSI doesn't require a sequence point here,
     but PCC has one, so this will avoid some problems.  */
  emit_queue ();

  /* Free any temporary slots made in processing this argument.  Show
     that we might have taken the address of something and pushed that
     as an operand.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();

  return sibcall_failure;
}
/* Nonzero if we do not know how to pass TYPE solely in registers.
   We cannot do so in the following cases:

   - if the type has variable size;
   - if the type is marked as addressable (it is required to be
     constructed into the stack);
   - if the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.

   Which padding can't be supported depends on the byte endianness.

   A value in a register is implicitly padded at the most significant end.
   On a big-endian machine, that is the lower end in memory.
   So a value padded in memory at the upper end can't go in a register.
   For a little-endian machine, the reverse is true.  */
bool
default_must_pass_in_stack (mode, type)
     enum machine_mode mode;
     tree type;
{
  if (!type)
    return false;

  /* If the type has variable size...  */
  if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    return true;

  /* If the type is marked as addressable (it is required
     to be constructed into the stack)...  */
  if (TREE_ADDRESSABLE (type))
    return true;

  /* If the padding and mode of the type are such that a copy into
     a register would put it into the wrong part of the register.  */
  if (mode == BLKmode
      && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
      && (FUNCTION_ARG_PADDING (mode, type)
          == (BYTES_BIG_ENDIAN ? upward : downward)))
    return true;

  return false;
}
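
/* A worked example with illustrative parameters: on a big-endian target
   with a 32-bit PARM_BOUNDARY, a 3-byte BLKmode aggregate whose
   FUNCTION_ARG_PADDING is upward satisfies the final test above, so it
   must be passed on the stack; the same aggregate padded downward could
   still go in a register.  */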